diff --git a/.venv/bin/Activate.ps1 b/.venv/bin/Activate.ps1 new file mode 100644 index 0000000..9d3646a --- /dev/null +++ b/.venv/bin/Activate.ps1 @@ -0,0 +1,241 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes. + +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. 
+ +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. + if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. 
+if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. +$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/.venv/bin/activate b/.venv/bin/activate new file mode 100644 index 0000000..6b2d304 --- /dev/null +++ b/.venv/bin/activate @@ -0,0 +1,66 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! 
"${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV=/home/bishwajeet/neft_inward_file_based/.venv +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/"bin":$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1='(.venv) '"${PS1:-}" + export PS1 +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null +fi diff --git a/.venv/bin/activate.csh b/.venv/bin/activate.csh new file mode 100644 index 0000000..c573634 --- /dev/null +++ b/.venv/bin/activate.csh @@ -0,0 +1,25 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . +# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV /home/bishwajeet/neft_inward_file_based/.venv + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/"bin":$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + set prompt = '(.venv) '"$prompt" +endif + +alias pydoc python -m pydoc + +rehash diff --git a/.venv/bin/activate.fish b/.venv/bin/activate.fish new file mode 100644 index 0000000..683f321 --- /dev/null +++ b/.venv/bin/activate.fish @@ -0,0 +1,64 @@ +# This file must be used with "source /bin/activate.fish" *from fish* +# (https://fishshell.com/); you cannot run it directly. + +function deactivate -d "Exit virtual environment and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self-destruct! + functions -e deactivate + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV /home/bishwajeet/neft_inward_file_based/.venv + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/"bin $PATH + +# Unset PYTHONHOME if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # Save the current fish_prompt function as the function _old_fish_prompt. 
+ functions -c fish_prompt _old_fish_prompt + + # With the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command. + set -l old_status $status + + # Output the venv prompt; color taken from the blue of the Python logo. + printf "%s%s%s" (set_color 4B8BBE) '(.venv) ' (set_color normal) + + # Restore the return status of the previous command. + echo "exit $old_status" | . + # Output the original/"old" prompt. + _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/.venv/bin/inv b/.venv/bin/inv new file mode 100755 index 0000000..c32a59d --- /dev/null +++ b/.venv/bin/inv @@ -0,0 +1,8 @@ +#!/home/bishwajeet/neft_inward_file_based/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from invoke.main import program +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(program.run()) diff --git a/.venv/bin/invoke b/.venv/bin/invoke new file mode 100755 index 0000000..c32a59d --- /dev/null +++ b/.venv/bin/invoke @@ -0,0 +1,8 @@ +#!/home/bishwajeet/neft_inward_file_based/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from invoke.main import program +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(program.run()) diff --git a/.venv/bin/pip b/.venv/bin/pip new file mode 100755 index 0000000..a88e7fa --- /dev/null +++ b/.venv/bin/pip @@ -0,0 +1,8 @@ +#!/home/bishwajeet/neft_inward_file_based/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/pip3 b/.venv/bin/pip3 new file mode 100755 index 0000000..a88e7fa --- /dev/null +++ b/.venv/bin/pip3 @@ -0,0 +1,8 @@ +#!/home/bishwajeet/neft_inward_file_based/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/pip3.9 b/.venv/bin/pip3.9 new file mode 100755 index 0000000..a88e7fa --- /dev/null +++ b/.venv/bin/pip3.9 @@ -0,0 +1,8 @@ +#!/home/bishwajeet/neft_inward_file_based/.venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/.venv/bin/python b/.venv/bin/python new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/.venv/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/.venv/bin/python3 b/.venv/bin/python3 new file mode 120000 index 0000000..ae65fda --- /dev/null +++ b/.venv/bin/python3 @@ -0,0 +1 @@ +/usr/bin/python3 \ No newline at end of file diff --git a/.venv/bin/python3.9 b/.venv/bin/python3.9 new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/.venv/bin/python3.9 @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/.venv/lib/python3.9/site-packages/__pycache__/typing_extensions.cpython-39.pyc b/.venv/lib/python3.9/site-packages/__pycache__/typing_extensions.cpython-39.pyc new file mode 100644 index 0000000..176afbb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/__pycache__/typing_extensions.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/_cffi_backend.cpython-39-x86_64-linux-gnu.so b/.venv/lib/python3.9/site-packages/_cffi_backend.cpython-39-x86_64-linux-gnu.so new file mode 100755 index 0000000..7341518 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/_cffi_backend.cpython-39-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py b/.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py new file mode 100644 index 0000000..f707416 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,132 @@ +import sys +import os +import re +import importlib +import warnings + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +warnings.filterwarnings('ignore', + r'.+ distutils\b.+ deprecated', + DeprecationWarning) + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils.") + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + warnings.warn("Setuptools is replacing distutils.") + mods = [name for name in sys.modules if re.match(r'distutils\b', name)] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') + return which == 'local' + + +def ensure_local_distutils(): + clear_distutils() + + # With the DistutilsMetaFinder in place, + # perform an import to cause distutils to be + # loaded from setuptools._distutils. Ref #2906. + add_shim() + importlib.import_module('distutils') + remove_shim() + + # check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. + """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + if path is not None: + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + import importlib.abc + import importlib.util + + class DistutilsLoader(importlib.abc.Loader): + + def create_module(self, spec): + return importlib.import_module('setuptools._distutils') + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader('distutils', DistutilsLoader()) + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. 
+ """ + if self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @staticmethod + def pip_imported_during_build(): + """ + Detect if pip is being imported in a build script. Ref #2355. + """ + import traceback + return any( + frame.f_globals['__file__'].endswith('setup.py') + for frame, line in traceback.walk_stack(None) + ) + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/.venv/lib/python3.9/site-packages/_distutils_hack/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/_distutils_hack/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..8393344 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/_distutils_hack/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/_distutils_hack/__pycache__/override.cpython-39.pyc b/.venv/lib/python3.9/site-packages/_distutils_hack/__pycache__/override.cpython-39.pyc new file mode 100644 index 0000000..4af2974 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/_distutils_hack/__pycache__/override.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/_distutils_hack/override.py b/.venv/lib/python3.9/site-packages/_distutils_hack/override.py new file mode 100644 index 0000000..2cc433a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/METADATA new file mode 100644 index 0000000..629a992 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/METADATA @@ -0,0 +1,343 @@ +Metadata-Version: 2.4 +Name: bcrypt +Version: 5.0.0 +Summary: Modern password hashing for your software and your servers +Author-email: The Python Cryptographic Authority developers +License: Apache-2.0 +Project-URL: homepage, https://github.com/pyca/bcrypt/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Free Threading :: 3 - Stable +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Provides-Extra: tests +Requires-Dist: pytest!=3.3.0,>=3.2.1; extra == "tests" +Provides-Extra: typecheck +Requires-Dist: mypy; extra == "typecheck" +Dynamic: license-file + +bcrypt 
+====== + +.. image:: https://img.shields.io/pypi/v/bcrypt.svg + :target: https://pypi.org/project/bcrypt/ + :alt: Latest Version + +.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main + :target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain + +Acceptable password hashing for your software and your servers (but you should +really use argon2id or scrypt) + + +Installation +============ + +To install bcrypt, simply: + +.. code:: console + + $ pip install bcrypt + +Note that bcrypt should build very easily on Linux provided you have a C +compiler and a Rust compiler (the minimum supported Rust version is 1.56.0). + +For Debian and Ubuntu, the following command will ensure that the required dependencies are installed: + +.. code:: console + + $ sudo apt-get install build-essential cargo + +For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed: + +.. code:: console + + $ sudo yum install gcc cargo + +For Alpine, the following command will ensure that the required dependencies are installed: + +.. code:: console + + $ apk add --update musl-dev gcc cargo + + +Alternatives +============ + +While bcrypt remains an acceptable choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_. + +Changelog +========= + +5.0.0 +----- + +* Bumped MSRV to 1.74. +* Added support for Python 3.14 and free-threaded Python 3.14. +* Added support for Windows on ARM. +* Passing ``hashpw`` a password longer than 72 bytes now raises a + ``ValueError``. Previously the password was silently truncated, following the + behavior of the original OpenBSD ``bcrypt`` implementation. + +4.3.0 +----- + +* Dropped support for Python 3.7. +* We now support free-threaded Python 3.13. +* We now support PyPy 3.11. +* We now publish wheels for free-threaded Python 3.13, for PyPy 3.11 on + ``manylinux``, and for ARMv7l on ``manylinux``. + +4.2.1 +----- + +* Bump Rust dependency versions - this should resolve crashes on Python 3.13 + free-threaded builds. +* We no longer build ``manylinux`` wheels for PyPy 3.9. + +4.2.0 +----- + +* Bump Rust dependency versions +* Removed the ``BCRYPT_ALLOW_RUST_163`` environment variable. + +4.1.3 +----- + +* Bump Rust dependency versions + +4.1.2 +----- + +* Publish both ``py37`` and ``py39`` wheels. This should resolve some errors + relating to initializing a module multiple times per process. + +4.1.1 +----- + +* Fixed the type signature on the ``kdf`` method. +* Fixed packaging bug on Windows. +* Fixed incompatibility with passlib package detection assumptions. + +4.1.0 +----- + +* Dropped support for Python 3.6. +* Bumped MSRV to 1.64. (Note: Rust 1.63 can be used by setting the ``BCRYPT_ALLOW_RUST_163`` environment variable) + +4.0.1 +----- + +* We now build PyPy ``manylinux`` wheels. +* Fixed a bug where passing an invalid ``salt`` to ``checkpw`` could result in + a ``pyo3_runtime.PanicException``. It now correctly raises a ``ValueError``. + +4.0.0 +----- + +* ``bcrypt`` is now implemented in Rust. Users building from source will need + to have a Rust compiler available. Nothing will change for users downloading + wheels. +* We no longer ship ``manylinux2010`` wheels. Users should upgrade to the latest + ``pip`` to ensure this doesn’t cause issues downloading wheels on their + platform. We now ship ``manylinux_2_28`` wheels for users on new enough platforms. 
+* ``NUL`` bytes are now allowed in inputs. + + +3.2.2 +----- + +* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works. + +3.2.1 +----- + +* Added support for compilation on z/OS +* The next release of ``bcrypt`` will be 4.0 and it will require Rust at + compile time, for users building from source. There will be no additional + requirement for users who are installing from wheels. Users on most + platforms will be able to obtain a wheel by making sure they have an up to + date ``pip``. The minimum supported Rust version will be 1.56.0. +* This will be the final release for which we ship ``manylinux2010`` wheels. + Going forward the minimum supported manylinux ABI for our wheels will be + ``manylinux2014``. The vast majority of users will continue to receive + ``manylinux`` wheels provided they have an up to date ``pip``. + + +3.2.0 +----- + +* Added typehints for library functions. +* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5). +* Shipped ``abi3`` Windows wheels (requires pip >= 20). + +3.1.7 +----- + +* Set a ``setuptools`` lower bound for PEP517 wheel building. +* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce + them was a maintenance burden. + +3.1.6 +----- + +* Added support for compilation on Haiku. + +3.1.5 +----- + +* Added support for compilation on AIX. +* Dropped Python 2.6 and 3.3 support. +* Switched to using ``abi3`` wheels for Python 3. If you are not getting a + wheel on a compatible platform please upgrade your ``pip`` version. + +3.1.4 +----- + +* Fixed compilation with mingw and on illumos. + +3.1.3 +----- +* Fixed a compilation issue on Solaris. +* Added a warning when using too few rounds with ``kdf``. + +3.1.2 +----- +* Fixed a compile issue affecting big endian platforms. +* Fixed invalid escape sequence warnings on Python 3.6. +* Fixed building in non-UTF8 environments on Python 2. + +3.1.1 +----- +* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3. + +3.1.0 +----- +* Added support for ``checkpw``, a convenience method for verifying a password. +* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt. +* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug. +* Fixed compilation under Alpine Linux. + +3.0.0 +----- +* Switched the C backend to code obtained from the OpenBSD project rather than + openwall. +* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function. + +2.0.0 +----- +* Added support for an adjustable prefix when calling ``gensalt``. +* Switched to CFFI 1.0+ + +Usage +----- + +Password Hashing +~~~~~~~~~~~~~~~~ + +Hashing and then later checking that a password matches the previously hashed +password is very simple: + +.. code:: pycon + + >>> import bcrypt + >>> password = b"super secret password" + >>> # Hash a password for the first time, with a randomly-generated salt + >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt()) + >>> # Check that an unhashed password matches one that has previously been + >>> # hashed + >>> if bcrypt.checkpw(password, hashed): + ... print("It Matches!") + ... else: + ... print("It Does not Match :(") + +KDF +~~~ + +As of 3.0.0 ``bcrypt`` now offers a ``kdf`` function which does ``bcrypt_pbkdf``. +This KDF is used in OpenSSH's newer encrypted private key format. + +.. code:: pycon + + >>> import bcrypt + >>> key = bcrypt.kdf( + ... password=b'password', + ... salt=b'salt', + ... desired_key_bytes=32, + ... 
rounds=100) + + +Adjustable Work Factor +~~~~~~~~~~~~~~~~~~~~~~ +One of bcrypt's features is an adjustable logarithmic work factor. To adjust +the work factor, pass the desired number of rounds to +``bcrypt.gensalt(rounds=12)`` (the default is 12): + +.. code:: pycon + + >>> import bcrypt + >>> password = b"super secret password" + >>> # Hash a password for the first time, with a certain number of rounds + >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14)) + >>> # Check that an unhashed password matches one that has previously been + >>> # hashed + >>> if bcrypt.checkpw(password, hashed): + ... print("It Matches!") + ... else: + ... print("It Does not Match :(") + + +Adjustable Prefix +~~~~~~~~~~~~~~~~~ + +Another one of bcrypt's features is an adjustable prefix to let you define what +libraries you'll remain compatible with. To adjust this, pass either ``2a`` or +``2b`` (the default) to ``bcrypt.gensalt(prefix=b"2b")`` as a bytes object. + +As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated. + +Maximum Password Length +~~~~~~~~~~~~~~~~~~~~~~~ + +The bcrypt algorithm only handles passwords up to 72 characters; any characters +beyond that are ignored. To work around this, a common approach is to hash a +password with a cryptographic hash (such as ``sha256``) and then base64 +encode it to prevent NULL byte problems before hashing the result with +``bcrypt``: + +.. code:: pycon + + >>> import base64 + >>> import hashlib + >>> import bcrypt + >>> password = b"an incredibly long password" * 10 + >>> hashed = bcrypt.hashpw( + ... base64.b64encode(hashlib.sha256(password).digest()), + ... bcrypt.gensalt() + ... ) + +Compatibility +------------- + +This library should be compatible with py-bcrypt and it will run on Python +3.8+ (including free-threaded builds) and PyPy 3. + +Security +-------- + +``bcrypt`` follows the `same security policy as cryptography`_; if you +identify a vulnerability, we ask you to contact us privately. + +.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html +.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt +.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io +.. 
_`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt diff --git a/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/RECORD new file mode 100644 index 0000000..b86e0d2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/RECORD @@ -0,0 +1,11 @@ +bcrypt-5.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +bcrypt-5.0.0.dist-info/METADATA,sha256=yV1BfLlI6udlVy23eNbzDa62DSEbUrlWvlLBCI6UAdI,10524 +bcrypt-5.0.0.dist-info/RECORD,, +bcrypt-5.0.0.dist-info/WHEEL,sha256=WieEZvWpc0Erab6-NfTu9412g-GcE58js6gvBn3Q7B4,111 +bcrypt-5.0.0.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850 +bcrypt-5.0.0.dist-info/top_level.txt,sha256=BkR_qBzDbSuycMzHWE1vzXrfYecAzUVmQs6G2CukqNI,7 +bcrypt/__init__.py,sha256=cv-NupIX6P7o6A4PK_F0ur6IZoDr3GnvyzFO9k16wKQ,1000 +bcrypt/__init__.pyi,sha256=ITUCB9mPVU8sKUbJQMDUH5YfQXZb1O55F9qvKZR_o8I,333 +bcrypt/__pycache__/__init__.cpython-39.pyc,, +bcrypt/_bcrypt.abi3.so,sha256=oFwJu4Gq44FqJDttx_oWpypfuUQ30BkCWzD2FhojdYw,631768 +bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/WHEEL new file mode 100644 index 0000000..eb203c1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp39-abi3-manylinux_2_34_x86_64 + diff --git a/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/licenses/LICENSE b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..11069ed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/licenses/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..7f0b6e7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/bcrypt-5.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +bcrypt diff --git a/.venv/lib/python3.9/site-packages/bcrypt/__init__.py b/.venv/lib/python3.9/site-packages/bcrypt/__init__.py new file mode 100644 index 0000000..81a92fd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/bcrypt/__init__.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from ._bcrypt import ( + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + checkpw, + gensalt, + hashpw, + kdf, +) +from ._bcrypt import ( + __version_ex__ as __version__, +) + +__all__ = [ + "__author__", + "__copyright__", + "__email__", + "__license__", + "__summary__", + "__title__", + "__uri__", + "__version__", + "checkpw", + "gensalt", + "hashpw", + "kdf", +] diff --git a/.venv/lib/python3.9/site-packages/bcrypt/__init__.pyi b/.venv/lib/python3.9/site-packages/bcrypt/__init__.pyi new file mode 100644 index 0000000..12e4a2e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/bcrypt/__init__.pyi @@ -0,0 +1,10 @@ +def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes: ... +def hashpw(password: bytes, salt: bytes) -> bytes: ... +def checkpw(password: bytes, hashed_password: bytes) -> bool: ... +def kdf( + password: bytes, + salt: bytes, + desired_key_bytes: int, + rounds: int, + ignore_few_rounds: bool = False, +) -> bytes: ... diff --git a/.venv/lib/python3.9/site-packages/bcrypt/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/bcrypt/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..ef40ee4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/bcrypt/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/bcrypt/_bcrypt.abi3.so b/.venv/lib/python3.9/site-packages/bcrypt/_bcrypt.abi3.so new file mode 100755 index 0000000..4806fec Binary files /dev/null and b/.venv/lib/python3.9/site-packages/bcrypt/_bcrypt.abi3.so differ diff --git a/.venv/lib/python3.9/site-packages/bcrypt/py.typed b/.venv/lib/python3.9/site-packages/bcrypt/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/METADATA new file mode 100644 index 0000000..67508e5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/METADATA @@ -0,0 +1,68 @@ +Metadata-Version: 2.4 +Name: cffi +Version: 2.0.0 +Summary: Foreign Function Interface for Python calling C code. 
+Author: Armin Rigo, Maciej Fijalkowski +Maintainer: Matt Davis, Matt Clay, Matti Picus +License-Expression: MIT +Project-URL: Documentation, https://cffi.readthedocs.io/ +Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html +Project-URL: Downloads, https://github.com/python-cffi/cffi/releases +Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi +Project-URL: Source Code, https://github.com/python-cffi/cffi +Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Free Threading :: 2 - Beta +Classifier: Programming Language :: Python :: Implementation :: CPython +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: AUTHORS +Requires-Dist: pycparser; implementation_name != "PyPy" +Dynamic: license-file + +[![GitHub Actions Status](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml?query=branch%3Amain++) +[![PyPI version](https://img.shields.io/pypi/v/cffi.svg)](https://pypi.org/project/cffi) +[![Read the Docs](https://img.shields.io/badge/docs-latest-blue.svg)][Documentation] + + +CFFI +==== + +Foreign Function Interface for Python calling C code. + +Please see the [Documentation], or the uncompiled documentation sources in the `doc/` subdirectory. + +Download +-------- + +[Download page](https://github.com/python-cffi/cffi/releases) + +Source Code +----------- + +Source code is publicly available on +[GitHub](https://github.com/python-cffi/cffi). + +Contact +------- + +[Mailing list](https://groups.google.com/forum/#!forum/python-cffi) + +Testing/development tips +------------------------ + +After `git clone` or `wget && tar`, we will get a directory called `cffi` or `cffi-x.x.x`; we call it the `repo-directory`. To run tests under CPython, run the following in the `repo-directory`: + + pip install pytest + pip install -e . 
# editable install of CFFI for local development + pytest src/c/ testing/ + +[Documentation]: http://cffi.readthedocs.org/ diff --git a/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/RECORD new file mode 100644 index 0000000..25e5445 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/RECORD @@ -0,0 +1,49 @@ +_cffi_backend.cpython-39-x86_64-linux-gnu.so,sha256=9J_Iuv_0UXORJQsmFFKfZwHTQDTC313tRixPBAunqCU,339784 +cffi-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cffi-2.0.0.dist-info/METADATA,sha256=uYzn40F68Im8EtXHNBLZs7FoPM-OxzyYbDWsjJvhujk,2559 +cffi-2.0.0.dist-info/RECORD,, +cffi-2.0.0.dist-info/WHEEL,sha256=A4SdjgbIBZ_kjTsEuOcR8fCpVKV5dez8wjeT_kpjoQs,147 +cffi-2.0.0.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75 +cffi-2.0.0.dist-info/licenses/AUTHORS,sha256=KmemC7-zN1nWfWRf8TG45ta8TK_CMtdR_Kw-2k0xTMg,208 +cffi-2.0.0.dist-info/licenses/LICENSE,sha256=W6JN3FcGf5JJrdZEw6_EGl1tw34jQz73Wdld83Cwr2M,1123 +cffi-2.0.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi/__init__.py,sha256=-ksBQ7MfDzVvbBlV_ftYBWAmEqfA86ljIzMxzaZeAlI,511 +cffi/__pycache__/__init__.cpython-39.pyc,, +cffi/__pycache__/_imp_emulation.cpython-39.pyc,, +cffi/__pycache__/_shimmed_dist_utils.cpython-39.pyc,, +cffi/__pycache__/api.cpython-39.pyc,, +cffi/__pycache__/backend_ctypes.cpython-39.pyc,, +cffi/__pycache__/cffi_opcode.cpython-39.pyc,, +cffi/__pycache__/commontypes.cpython-39.pyc,, +cffi/__pycache__/cparser.cpython-39.pyc,, +cffi/__pycache__/error.cpython-39.pyc,, +cffi/__pycache__/ffiplatform.cpython-39.pyc,, +cffi/__pycache__/lock.cpython-39.pyc,, +cffi/__pycache__/model.cpython-39.pyc,, +cffi/__pycache__/pkgconfig.cpython-39.pyc,, +cffi/__pycache__/recompiler.cpython-39.pyc,, +cffi/__pycache__/setuptools_ext.cpython-39.pyc,, +cffi/__pycache__/vengine_cpy.cpython-39.pyc,, +cffi/__pycache__/vengine_gen.cpython-39.pyc,, +cffi/__pycache__/verifier.cpython-39.pyc,, +cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 +cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055 +cffi/_embedding.h,sha256=Ai33FHblE7XSpHOCp8kPcWwN5_9BV14OvN0JVa6ITpw,18786 +cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960 +cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230 +cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169 +cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 +cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731 +cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805 +cffi/cparser.py,sha256=QUTfmlL-aO-MYR8bFGlvAUHc36OQr7XYLe0WLkGFjRo,44790 +cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 +cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797 +cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 +cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 +cffi/recompiler.py,sha256=78J6lMEEOygXNmjN9-fOFFO3j7eW-iFxSrxfvQb54bY,65509 +cffi/setuptools_ext.py,sha256=0rCwBJ1W7FHWtiMKfNXsSST88V8UXrui5oeXFlDNLG8,9411 +cffi/vengine_cpy.py,sha256=oyQKD23kpE0aChUKA8Jg0e723foPiYzLYEdb-J0MiNs,43881 
+cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939 +cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182 diff --git a/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/WHEEL new file mode 100644 index 0000000..fb8eff6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp39-cp39-manylinux_2_17_x86_64 +Tag: cp39-cp39-manylinux2014_x86_64 + diff --git a/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/entry_points.txt b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/entry_points.txt new file mode 100644 index 0000000..4b0274f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS new file mode 100644 index 0000000..370a25d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS @@ -0,0 +1,8 @@ +This package has been mostly done by Armin Rigo with help from +Maciej Fijałkowski. The idea is heavily based (although not directly +copied) from LuaJIT ffi by Mike Pall. + + +Other contributors: + + Google Inc. diff --git a/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..0a1dbfb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE @@ -0,0 +1,23 @@ + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + MIT No Attribution + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. 
+ diff --git a/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..f645779 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi-2.0.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/.venv/lib/python3.9/site-packages/cffi/__init__.py b/.venv/lib/python3.9/site-packages/cffi/__init__.py new file mode 100644 index 0000000..c99ec3d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "2.0.0" +__version_info__ = (2, 0, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e4e45ca Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/_imp_emulation.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/_imp_emulation.cpython-39.pyc new file mode 100644 index 0000000..31f49a1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/_imp_emulation.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-39.pyc new file mode 100644 index 0000000..22d7fff Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/api.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/api.cpython-39.pyc new file mode 100644 index 0000000..093e916 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/api.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc new file mode 100644 index 0000000..f84cd87 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/backend_ctypes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc new file mode 100644 index 0000000..03b26f8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/cffi_opcode.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc new file mode 100644 index 0000000..a0a47dd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/commontypes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/cparser.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/cffi/__pycache__/cparser.cpython-39.pyc new file mode 100644 index 0000000..5afc490 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/cparser.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/error.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/error.cpython-39.pyc new file mode 100644 index 0000000..b208abe Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/error.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc new file mode 100644 index 0000000..7556a0d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/ffiplatform.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/lock.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/lock.cpython-39.pyc new file mode 100644 index 0000000..764ab4d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/lock.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/model.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/model.cpython-39.pyc new file mode 100644 index 0000000..7dff572 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/model.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc new file mode 100644 index 0000000..7e0280d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/pkgconfig.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc new file mode 100644 index 0000000..31d7d8c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/recompiler.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc new file mode 100644 index 0000000..b7c6b3d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/setuptools_ext.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc new file mode 100644 index 0000000..4fbcc3f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/vengine_cpy.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc new file mode 100644 index 0000000..014205d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/vengine_gen.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/__pycache__/verifier.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cffi/__pycache__/verifier.cpython-39.pyc new file mode 100644 index 0000000..4502702 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cffi/__pycache__/verifier.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cffi/_cffi_errors.h 
b/.venv/lib/python3.9/site-packages/cffi/_cffi_errors.h new file mode 100644 index 0000000..158e059 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. +*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. 
*/ + Py_DECREF(s); + PyErr_Clear(); + return; + + error: + _cffi_bootstrap_text = NULL; + PyErr_Clear(); +} + +#else + +static PyObject *_cffi_start_error_capture(void) { return NULL; } +static void _cffi_stop_error_capture(PyObject *ecap) { } + +#endif diff --git a/.venv/lib/python3.9/site-packages/cffi/_cffi_include.h b/.venv/lib/python3.9/site-packages/cffi/_cffi_include.h new file mode 100644 index 0000000..908a1d7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/_cffi_include.h @@ -0,0 +1,389 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. Py_DEBUG implies them, + but not the other way around. + + The implementation is messy (issue #350): on Windows, with _MSC_VER, + we have to define Py_LIMITED_API even before including pyconfig.h. + In that case, we guess what pyconfig.h will do to the macros above, + and check our guess after the #include. + + Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv + version >= 16.0.0. With older versions of either, you don't get a + copy of PYTHON3.DLL in the virtualenv. We can't check the version of + CPython *before* we even include pyconfig.h. ffi.set_source() puts + a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is + running on Windows < 3.5, as an attempt at fixing it, but that's + arguably wrong because it may not be the target version of Python. + Still better than nothing I guess. As another workaround, you can + remove the definition of Py_LIMITED_API here. + + See also 'py_limited_api' in cffi/setuptools_ext.py. +*/ +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) +# ifdef _MSC_VER +# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# include + /* sanity-check: Py_LIMITED_API will cause crashes if any of these + are also defined. Normally, the Python file PC/pyconfig.h does not + cause any of these to be defined, with the exception that _DEBUG + causes Py_DEBUG. Double-check that. 
*/ +# ifdef Py_LIMITED_API +# if defined(Py_DEBUG) +# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" +# endif +# if defined(Py_TRACE_REFS) +# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" +# endif +# if defined(Py_REF_DEBUG) +# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" +# endif +# endif +# else +# include +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# endif +#endif + +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +# define _cffi_float_complex_t _Fcomplex /* include for it */ +# define _cffi_double_complex_t _Dcomplex /* include for it */ +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +# define _cffi_float_complex_t float _Complex +# define _cffi_double_complex_t double _Complex +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? 
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = 
PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return (int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/.venv/lib/python3.9/site-packages/cffi/_embedding.h b/.venv/lib/python3.9/site-packages/cffi/_embedding.h new file mode 100644 index 0000000..64c04f6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/_embedding.h @@ -0,0 +1,550 @@ + +/***** Support code for embedding *****/ + +#ifdef __cplusplus +extern "C" { +#endif + + +#if defined(_WIN32) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. If _cffi_start_python() fails, then this is set + to NULL; otherwise, it should never be NULL. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. 
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 2.0.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + + In Python >= 3.12, this stopped working because that particular + tp_version_tag gets modified during interpreter startup. It's + arguably a bad idea before 3.12 too, but again we can't change + that because someone might use a mixture of cffi embedded + modules, and no-one reported a bug so far. In Python >= 3.12 + we go instead for PyCapsuleType.tp_as_buffer, which is supposed + to always be NULL. We write to it temporarily a pointer to + a struct full of NULLs, which is semantically the same. 
+ */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# else +# if PY_VERSION_HEX < 0x030C0000 + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value = -42; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); +# else + static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; + empty_buffer_procs.mark = -42; + PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) + &PyCapsule_Type.tp_as_buffer; + PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; +# endif + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { +# if PY_VERSION_HEX < 0x030C0000 + assert(old_value == locked_value); +# else + /* The pointer should point to a possibly different + empty_buffer_procs from another C extension module */ + assert(((struct ebp_s *)old_value)->mark == -42); +# endif + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. 
when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/.venv/lib/python3.9/site-packages/cffi/_imp_emulation.py b/.venv/lib/python3.9/site-packages/cffi/_imp_emulation.py new file mode 100644 index 0000000..136abdd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/_imp_emulation.py @@ -0,0 +1,83 @@ + +try: + # this works on Python < 3.12 + from imp import * + +except ImportError: + # this is a limited emulation for Python >= 3.12. + # Note that this is used only for tests or for the old ffi.verify(). + # This is copied from the source code of Python 3.11. + + from _imp import (acquire_lock, release_lock, + is_builtin, is_frozen) + + from importlib._bootstrap import _load + + from importlib import machinery + import os + import sys + import tokenize + + SEARCH_ERROR = 0 + PY_SOURCE = 1 + PY_COMPILED = 2 + C_EXTENSION = 3 + PY_RESOURCE = 4 + PKG_DIRECTORY = 5 + C_BUILTIN = 6 + PY_FROZEN = 7 + PY_CODERESOURCE = 8 + IMP_HOOK = 9 + + def get_suffixes(): + extensions = [(s, 'rb', C_EXTENSION) + for s in machinery.EXTENSION_SUFFIXES] + source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] + bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] + return extensions + source + bytecode + + def find_module(name, path=None): + if not isinstance(name, str): + raise TypeError("'name' must be a str, not {}".format(type(name))) + elif not isinstance(path, (type(None), list)): + # Backwards-compatibility + raise RuntimeError("'path' must be None or a list, " + "not {}".format(type(path))) + + if path is None: + if is_builtin(name): + return None, None, ('', '', C_BUILTIN) + elif is_frozen(name): + return None, None, ('', '', PY_FROZEN) + else: + path = sys.path + + for entry in path: + package_directory = os.path.join(entry, name) + for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: + package_file_name = '__init__' + suffix + file_path = os.path.join(package_directory, package_file_name) + if os.path.isfile(file_path): + return None, package_directory, ('', '', PKG_DIRECTORY) + for suffix, mode, type_ in get_suffixes(): + file_name = name + suffix + file_path = os.path.join(entry, file_name) + if os.path.isfile(file_path): + break + else: + continue + break # Break out of outer loop when breaking out of inner loop. 
+ else: + raise ImportError(name, name=name) + + encoding = None + if 'b' not in mode: + with open(file_path, 'rb') as file: + encoding = tokenize.detect_encoding(file.readline)[0] + file = open(file_path, mode, encoding=encoding) + return file, file_path, (suffix, mode, type_) + + def load_dynamic(name, path, file=None): + loader = machinery.ExtensionFileLoader(name, path) + spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) + return _load(spec) diff --git a/.venv/lib/python3.9/site-packages/cffi/_shimmed_dist_utils.py b/.venv/lib/python3.9/site-packages/cffi/_shimmed_dist_utils.py new file mode 100644 index 0000000..c3d2312 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/_shimmed_dist_utils.py @@ -0,0 +1,45 @@ +""" +Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful +error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. + +This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. +""" +import sys + +try: + # import setuptools first; this is the most robust way to ensure its embedded distutils is available + # (the .pth shim should usually work, but this is even more robust) + import setuptools +except Exception as ex: + if sys.version_info >= (3, 12): + # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex + + # silently ignore on older Pythons (support fallback to stdlib distutils where available) +else: + del setuptools + +try: + # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils + from distutils import log, sysconfig + from distutils.ccompiler import CCompiler + from distutils.command.build_ext import build_ext + from distutils.core import Distribution, Extension + from distutils.dir_util import mkpath + from distutils.errors import DistutilsSetupError, CompileError, LinkError + from distutils.log import set_threshold, set_verbosity + + if sys.platform == 'win32': + try: + # FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler` + from distutils.msvc9compiler import MSVCCompiler + except ImportError: + MSVCCompiler = None +except Exception as ex: + if sys.version_info >= (3, 12): + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex + + # anything older, just let the underlying distutils import error fly + raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex + +del sys diff --git a/.venv/lib/python3.9/site-packages/cffi/api.py b/.venv/lib/python3.9/site-packages/cffi/api.py new file mode 100644 index 0000000..5a474f3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/api.py @@ -0,0 +1,967 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . 
import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. 
+ If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). + """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + if not (isinstance(name, basestring) or + name is None or + isinstance(name, self.CData)): + raise TypeError("dlopen(name): name must be a file name, None, " + "or an already-opened 'void *' handle") + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). + """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. 
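+
+ A minimal sketch of both uses (purely illustrative):
+
+ ffi.typeof("long*") # -> the ctype object describing 'long *'
+ ffi.typeof(ffi.new("long*")) # -> the same ctype, taken from a cdata instance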
+ """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. 
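+
+ A minimal sketch, assuming malloc() and free() were declared with
+ ffi.cdef() and the C library was loaded as 'libc' via ffi.dlopen(None):
+
+ alloc = ffi.new_allocator(libc.malloc, libc.free)
+ p = alloc("int[10]") # behaves like ffi.new("int[10]"), but allocates via malloc()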
+ """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. 
+ 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. + """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. It tells the GC that the returned object keeps alive + roughly 'size' bytes of external memory. + """ + return self._backend.gcp(cdata, destructor, size) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. 
+ """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. 
If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. + """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from cffi._shimmed_dist_utils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from cffi._shimmed_dist_utils import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, + uses_ffiplatform=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, + uses_ffiplatform=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. 
+ try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. 
Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not 
isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. + import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/.venv/lib/python3.9/site-packages/cffi/backend_ctypes.py b/.venv/lib/python3.9/site-packages/cffi/backend_ctypes.py new file mode 100644 index 0000000..e7956a7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/backend_ctypes.py @@ -0,0 +1,1121 @@ +import ctypes, ctypes.util, operator, sys +from . import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def _get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = 
getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' 
+ + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def 
new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if 
not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if 
BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def 
new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + 
value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object 
(got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset 
= BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/.venv/lib/python3.9/site-packages/cffi/cffi_opcode.py b/.venv/lib/python3.9/site-packages/cffi/cffi_opcode.py new file mode 100644 index 0000000..6421df6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + '_cffi_float_complex_t': PRIM_FLOATCOMPLEX, + '_cffi_double_complex_t': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/.venv/lib/python3.9/site-packages/cffi/commontypes.py b/.venv/lib/python3.9/site-packages/cffi/commontypes.py new file mode 100644 index 0000000..d4dae35 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/commontypes.py @@ -0,0 +1,82 @@ +import sys +from . 
import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above +COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t' +COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t' + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/.venv/lib/python3.9/site-packages/cffi/cparser.py b/.venv/lib/python3.9/site-packages/cffi/cparser.py new file mode 100644 index 0000000..dd590d8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/cparser.py @@ -0,0 +1,1015 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
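+    # (Illustrative note: freezers only scan static 'import' statements, so
+    # merely naming pycparser's generated table modules here lets them be
+    # bundled, even though this function is never executed.)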
+    import pycparser.yacctab
+    import pycparser.lextab
+
+CDEF_SOURCE_STRING = "<cdef source string>"
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+                        re.DOTALL | re.MULTILINE)
+_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+                       r"\b((?:[^\n\\]|\\.)*?)$",
+                       re.DOTALL | re.MULTILINE)
+_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE)
+_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
+_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
+_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
+_r_words = re.compile(r"\w+|\S")
+_parser_cache = None
+_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
+_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
+_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
+_r_cdecl = re.compile(r"\b__cdecl\b")
+_r_extern_python = re.compile(r'\bextern\s*"'
+                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
+_r_star_const_space = re.compile( # matches "* const "
+    r"[*]\s*((const|volatile|restrict)\b\s*)+")
+_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
+                              r"\.\.\.")
+_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
+
+def _get_parser():
+    global _parser_cache
+    if _parser_cache is None:
+        _parser_cache = pycparser.CParser()
+    return _parser_cache
+
+def _workaround_for_old_pycparser(csource):
+    # Workaround for a pycparser issue (fixed between pycparser 2.10 and
+    # 2.14): "char*const***" gives us a wrong syntax tree, the same as
+    # for "char***(*const)". This means we can't tell the difference
+    # afterwards. But "char(*const(***))" gives us the right syntax
+    # tree. The issue only occurs if there are several stars in
+    # sequence with no parenthesis in between, just possibly qualifiers.
+    # Attempt to fix it by adding some parentheses in the source: each
+    # time we see "* const" or "* const *", we add an opening
+    # parenthesis before each star---the hard part is figuring out where
+    # to close them.
+    parts = []
+    while True:
+        match = _r_star_const_space.search(csource)
+        if not match:
+            break
+        #print repr(''.join(parts)+csource), '=>',
+        parts.append(csource[:match.start()])
+        parts.append('('); closing = ')'
+        parts.append(match.group()) # e.g. 
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+ line_directives = [] + def replace(m): + i = len(line_directives) + line_directives.append(m.group()) + return '#line@%d' % i + csource = _r_line_directive.sub(replace, csource) + return csource, line_directives + +def _put_back_line_directives(csource, line_directives): + def replace(m): + s = m.group() + if not s.startswith('#line@'): + raise AssertionError("unexpected #line directive " + "(should have been processed and removed") + return line_directives[int(s[6:])] + return _r_line_directive.sub(replace, csource) + +def _preprocess(csource): + # First, remove the lines of the form '#line N "filename"' because + # the "filename" part could confuse the rest + csource, line_directives = _remove_line_directives(csource) + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literals (except in line directives)! + def replace_keeping_newlines(m): + return ' ' + m.group().count('\n') * '\n' + csource = _r_comment.sub(replace_keeping_newlines, csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. + csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. 
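+    # For example (illustrative): 'int foo(int, ...);' becomes
+    # 'int foo(int,  __dotdotdot__ );' here, and _parse_function_type()
+    # later recognizes that marker as the C ellipsis.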
+ csource = csource.replace('...', ' __dotdotdot__ ') + # Finally, put back the line directives + csource = _put_back_line_directives(csource, line_directives) + return csource, macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = [] + csourcelines.append('# 1 ""') + for typename in typenames: + csourcelines.append('typedef int %s;' % typename) + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + # this forces pycparser to consider the following in the file + # called from line 1 + csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) + csourcelines.append(csource) + csourcelines.append('') # see test_missing_newline_bug + fullcsource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(fullcsource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) + # and interpret that as a line number. This will not work if + # the user gives explicit ``# NUM "FILE"`` directives. 
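+        # Example (illustrative): a pycparser message such as
+        # '<cdef source string>:4:13: before: )' gives linenum == 4, and the
+        # 4th line of csource is returned for convert_pycparser_error() to quote.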
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + # skip pragma, only in pycparser 2.15 + import warnings + warnings.warn( + "#pragma in cdef() are entirely ignored. " + "They should be removed for now, otherwise your " + "code might behave differently in a future version " + "of CFFI if #pragma support gets added. 
Note that " + "'#pragma pack' needs to be replaced with the " + "'packed' keyword argument to cdef().") + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # 
"void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. 
This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with 
only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. + abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? 
If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. + if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/.venv/lib/python3.9/site-packages/cffi/error.py b/.venv/lib/python3.9/site-packages/cffi/error.py new file mode 100644 index 0000000..0a27247 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/.venv/lib/python3.9/site-packages/cffi/ffiplatform.py b/.venv/lib/python3.9/site-packages/cffi/ffiplatform.py new file mode 100644 index 0000000..adca28f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/ffiplatform.py @@ -0,0 +1,113 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + from cffi._shimmed_dist_utils import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity + + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = set_threshold(0) or 0 + try: + set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + set_threshold(old_level) + except (CompileError, LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + 
return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/.venv/lib/python3.9/site-packages/cffi/lock.py b/.venv/lib/python3.9/site-packages/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/.venv/lib/python3.9/site-packages/cffi/model.py b/.venv/lib/python3.9/site-packages/cffi/model.py new file mode 100644 index 0000000..e5f4cae --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/model.py @@ -0,0 +1,618 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + '_cffi_float_complex_t': 'j', + '_cffi_double_complex_t': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + 
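+    # (Illustrative note: instances come from 'typedef int... foo_t;' in
+    # cdef(); the real size and signedness are only known after compilation,
+    # which is why build_backend_type() below refuses to build a backend type.)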
_attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = " *&" + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + extra = qualify(quals, extra) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + 
brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def length_is_unknown(self): + return isinstance(self.length, str) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length_is_unknown(): + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
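+        # Example (illustrative): for 'struct s { int a; struct { int b; }; };'
+        # enumfields() yields entries for both 'a' and 'b', so after flattening
+        # the field 'b' of the anonymous inner struct appears directly here.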
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. 
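+                # (Illustrative note: the default "once per location" warning
+                # filter records already-emitted warnings in this module's
+                # __warningregistry__; clearing it lets the warning fire again.)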
+ __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/.venv/lib/python3.9/site-packages/cffi/parse_c_type.h b/.venv/lib/python3.9/site-packages/cffi/parse_c_type.h new file mode 100644 index 0000000..84e4ef8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
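+
+   As a quick orientation: each _cffi_opcode_t below packs an 8-bit opcode
+   together with an argument shifted left by 8 bits.  For example,
+   _CFFI_OP(_CFFI_OP_PRIMITIVE, _CFFI_PRIM_INT) evaluates to 1 | (7 << 8)
+   and denotes the primitive type 'int'; _CFFI_GETOP() and _CFFI_GETARG()
+   undo the packing.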
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/.venv/lib/python3.9/site-packages/cffi/pkgconfig.py b/.venv/lib/python3.9/site-packages/cffi/pkgconfig.py new file mode 100644 index 0000000..5c93f15 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags cfg2 to cf1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + "be decoded with encoding %r:\n%r" % + (flag, libname, encoding, bout)) + + if os.altsep != '\\' and '\\' in bout: + raise 
PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. + """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/.venv/lib/python3.9/site-packages/cffi/recompiler.py b/.venv/lib/python3.9/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..7734a34 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/recompiler.py @@ -0,0 +1,1598 @@ +import io, os, sys, sysconfig +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = ((sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) and + not sysconfig.get_config_var("Py_GIL_DISABLED")) # free-threaded doesn't yet support limited API + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): 
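+        # Rough illustration (assuming format_four_bytes() renders the index
+        # as four big-endian "\xNN" escapes, as defined in cffi_opcode.py):
+        # with type_index 3 and name 'foo_t' this returns the literal text
+        #     b'\x00\x00\x00\x03foo_t'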
+ return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. 
+ expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + 
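+            # Note: the embedded Python source is emitted as comma-separated
+            # decimal byte values (plus "// ..." comment lines) rather than
+            # one huge C string literal, which would hit MSVC's string-length
+            # limit.  E.g. a source line "hi\n" comes out roughly as
+            #     // 'hi\n'
+            #     104,105,10,
+            # (sketch only; see _print_string_literal_in_array further down).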
self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy > 0 or self.ffi._embedding is not None: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if flags & 
1: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', 
'.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. 
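+        # Rough sketch of the rewriting done below (hypothetical example,
+        # using a made-up declaration "struct pt f(struct pt a);"):
+        # struct/union and complex arguments and results are passed through
+        # pointers, so the emitted wrapper looks roughly like
+        #     static void _cffi_f_f(struct pt *result, struct pt *x0)
+        #     { { *result = f(*x0); } }
+        # whereas signatures needing no indirection simply get
+        # "#define _cffi_f_X _cffi_d_X".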
+ def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
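+                # For instance (sketch, not emitted verbatim): a field
+                # declared in the cdef as "int a[];" drops to item type 'int'
+                # and name 'a[0]' below, so the check becomes
+                #     { int *tmp = &p->a[0]; (void)tmp; }
+                # which still compiles if the real C field is, say,
+                # "int a[10];".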
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '(size_t)(((char *)&((%s)4096)->%s) - (char *)4096)' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very 
nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
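+    # Sketch of what _generate_cpy_const() below emits for an integer
+    # constant (hypothetical name FOO):
+    #     static int _cffi_const_FOO(unsigned long long *o)
+    #     {
+    #         int n = (FOO) <= 0;
+    #         *o = (unsigned long long)((FOO) | 0);  /* check that FOO is an integer */
+    #         return n;
+    #     }
+    # The generated module later reads *o together with n to reconstruct the
+    # (possibly signed) value.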
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + 
else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _is_file_like(maybefile): + # compare to xml.etree.ElementTree._get_writer + return hasattr(maybefile, 'write') + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + if _is_file_like(target_file): + recompiler.write_source_to_f(target_file, preamble) + return True + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. 
Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! + # FUTURE: this module was removed in setuptools 74; this is likely dead code and should be removed, + # since the toolchain it supports (VS2005-2008) is also long dead. + from cffi._shimmed_dist_utils import MSVCCompiler + if MSVCCompiler is not None: + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from cffi._shimmed_dist_utils import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from cffi._shimmed_dist_utils import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, + uses_ffiplatform=True, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + if call_c_compiler and _is_file_like(c_file): + raise TypeError("Writing to file-like objects is not supported " + "with call_c_compiler=True") + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + if uses_ffiplatform: + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + else: + ext = None + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + 
return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/.venv/lib/python3.9/site-packages/cffi/setuptools_ext.py b/.venv/lib/python3.9/site-packages/cffi/setuptools_ext.py new file mode 100644 index 0000000..5cdd246 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,229 @@ +import os +import sys +import sysconfig + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from cffi._shimmed_dist_utils import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. 
So + we'll give it another try and set py_limited_api on Windows >= 3.5. + """ + from cffi._shimmed_dist_utils import log + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + + if sysconfig.get_config_var("Py_GIL_DISABLED"): + if kwds.get('py_limited_api'): + log.info("Ignoring py_limited_api=True for free-threaded build.") + + kwds['py_limited_api'] = False + + if kwds.get('py_limited_api') is False: + # avoid setting Py_LIMITED_API if py_limited_api=False + # which _cffi_include.h does unless _CFFI_NO_LIMITED_API is defined + kwds.setdefault("define_macros", []).append(("_CFFI_NO_LIMITED_API", None)) + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import Extension, log, mkpath + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. 
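The build_ext_make_mod hook above is the piece that makes the setuptools `cffi_modules` keyword work end to end: at build time it swaps the `'$PLACEHOLDER'` source entry for the freshly generated C file and then lets the normal build_ext run. For orientation only (a hedged sketch, not part of the vendored file; the names `example_build.py`, `_example`, and `ffibuilder` are hypothetical), the keyword is typically wired up like this:

# example_build.py -- hypothetical out-of-line, API-mode build script
from cffi import FFI

ffibuilder = FFI()
ffibuilder.cdef("double add(double x, double y);")   # declarations exposed on the lib object
ffibuilder.set_source(
    "_example",                                      # name of the generated extension module
    "static double add(double x, double y) { return x + y; }",
)

if __name__ == "__main__":
    ffibuilder.compile(verbose=True)                 # also usable without setuptools

# setup.py -- the cffi_modules entry below is parsed by add_cffi_module() above
from setuptools import setup

setup(
    name="example",
    setup_requires=["cffi>=1.0.0"],
    cffi_modules=["example_build.py:ffibuilder"],    # "path/to/build.py:ffi_variable"
    install_requires=["cffi>=1.0.0"],
)

With this wiring, a rebuild regenerates the C source only when needed; otherwise make_c_source() reports "already up-to-date", exactly as logged in make_mod() above.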
+ + +def _add_py_module(dist, ffi, module_name): + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import log, mkpath + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generate .py module in this case. + saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. + if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/.venv/lib/python3.9/site-packages/cffi/vengine_cpy.py b/.venv/lib/python3.9/site-packages/cffi/vengine_cpy.py new file mode 100644 index 0000000..02e6a47 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1087 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys +from . import model +from .error import VerificationError +from . import _imp_emulation as imp + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. 
The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. 
+ modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt('#if Py_GIL_DISABLED') + prnt(' PyUnstable_Module_SetGIL(lib, Py_MOD_GIL_NOT_USED);') + prnt('#endif') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! + # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. + revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. 
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif tp.is_complex_type(): + raise VerificationError( + "not implemented in verify(): complex types") + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type 
in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = tp.length_is_unknown()) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length_is_unknown(): + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include <Python.h> +#include <stddef.h> + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */ +# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */ +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +# define _cffi_float_complex_t float _Complex +# define _cffi_double_complex_t double _Complex +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? 
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + 
__attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/.venv/lib/python3.9/site-packages/cffi/vengine_gen.py b/.venv/lib/python3.9/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000..bffc821 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/vengine_gen.py @@ -0,0 +1,679 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. + self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = 
tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + 
and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length_is_unknown(): + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a <cdata 'type *'> which we have to replace with + # a <cdata 'type[N]'> if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr=<cdata 'int *'> from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include <stdio.h> +#include <stddef.h> +#include <stdarg.h> +#include <errno.h> +#include <sys/types.h> /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */ +# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */ +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +# define _cffi_float_complex_t float _Complex +# define _cffi_double_complex_t double _Complex +#endif +''' diff --git a/.venv/lib/python3.9/site-packages/cffi/verifier.py b/.venv/lib/python3.9/site-packages/cffi/verifier.py new file mode 100644 index 0000000..e392a2b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cffi/verifier.py @@ -0,0 +1,306 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from .
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/METADATA b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/METADATA new file mode 100644 index 0000000..15080bb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/METADATA @@ -0,0 +1,139 @@ +Metadata-Version: 2.4 +Name: cryptography +Version: 46.0.5 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Free Threading :: 3 - Stable +Classifier: Topic :: Security :: Cryptography +Requires-Dist: cffi>=1.14 ; python_full_version == '3.8.*' and platform_python_implementation != 'PyPy' +Requires-Dist: cffi>=2.0.0 ; python_full_version >= '3.9' and platform_python_implementation != 'PyPy' +Requires-Dist: typing-extensions>=4.13.2 ; python_full_version < '3.11' +Requires-Dist: bcrypt>=3.1.5 ; extra == 'ssh' +Requires-Dist: nox[uv]>=2024.4.15 ; extra == 'nox' +Requires-Dist: cryptography-vectors==46.0.5 ; extra == 'test' +Requires-Dist: pytest>=7.4.0 ; extra == 'test' +Requires-Dist: pytest-benchmark>=4.0 ; extra == 'test' +Requires-Dist: pytest-cov>=2.10.1 ; extra == 'test' +Requires-Dist: pytest-xdist>=3.5.0 ; extra == 'test' +Requires-Dist: pretend>=0.7 ; extra == 'test' +Requires-Dist: certifi>=2024 ; extra == 'test' +Requires-Dist: pytest-randomly ; extra == 'test-randomorder' +Requires-Dist: sphinx>=5.3.0 ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme>=3.0.0 ; extra == 'docs' +Requires-Dist: sphinx-inline-tabs ; extra == 'docs' +Requires-Dist: pyenchant>=3 ; extra == 'docstest' +Requires-Dist: readme-renderer>=30.0 ; extra == 'docstest' +Requires-Dist: sphinxcontrib-spelling>=7.3.1 ; extra == 'docstest' +Requires-Dist: build>=1.0.0 ; extra == 'sdist' +Requires-Dist: ruff>=0.11.11 ; extra == 'pep8test' +Requires-Dist: mypy>=1.14 ; extra == 'pep8test' +Requires-Dist: check-sdist ; extra == 'pep8test' +Requires-Dist: click>=8.0.1 ; extra == 'pep8test' +Provides-Extra: ssh 
+Provides-Extra: nox +Provides-Extra: test +Provides-Extra: test-randomorder +Provides-Extra: docs +Provides-Extra: docstest +Provides-Extra: sdist +Provides-Extra: pep8test +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. +Author-email: The Python Cryptographic Authority and individual contributors +License-Expression: Apache-2.0 OR BSD-3-Clause +Requires-Python: >=3.8, !=3.9.0, !=3.9.1 +Description-Content-Type: text/x-rst; charset=UTF-8 +Project-URL: homepage, https://github.com/pyca/cryptography +Project-URL: documentation, https://cryptography.io/ +Project-URL: source, https://github.com/pyca/cryptography/ +Project-URL: issues, https://github.com/pyca/cryptography/issues +Project-URL: changelog, https://cryptography.io/en/latest/changelog/ + +pyca/cryptography +================= + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.org/project/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg + :target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 3.8+ and PyPy3 7.3.11+. + +``cryptography`` includes both high level recipes and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests, and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + b'...' + >>> f.decrypt(token) + b'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +You can install ``cryptography`` with: + +.. code-block:: console + + $ pip install cryptography + +For full details see `the installation documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get +involved. + +Security +~~~~~~~~ + +Need to report a security issue? Please consult our `security reporting`_ +documentation. + + +.. _`documentation`: https://cryptography.io/ +.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev +.. 
_`security reporting`: https://cryptography.io/en/latest/security/ + diff --git a/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/RECORD b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/RECORD new file mode 100644 index 0000000..452595e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/RECORD @@ -0,0 +1,180 @@ +cryptography-46.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cryptography-46.0.5.dist-info/METADATA,sha256=aOYB9_B-Ccske76ncMz-w9c_VnzYihv_7kxZlt2i2WQ,5748 +cryptography-46.0.5.dist-info/RECORD,, +cryptography-46.0.5.dist-info/WHEEL,sha256=Z_Q61AkScnNB5zIIHPJrk2IJUl1F1WT6fVLUIYzC-XI,107 +cryptography-46.0.5.dist-info/licenses/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197 +cryptography-46.0.5.dist-info/licenses/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 +cryptography-46.0.5.dist-info/licenses/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 +cryptography/__about__.py,sha256=GWg4NAxg4vsSKUwmDy1HjUeAOhqTA46wIhiY6i03NSU,445 +cryptography/__init__.py,sha256=mthuUrTd4FROCpUYrTIqhjz6s6T9djAZrV7nZ1oMm2o,364 +cryptography/__pycache__/__about__.cpython-39.pyc,, +cryptography/__pycache__/__init__.cpython-39.pyc,, +cryptography/__pycache__/exceptions.cpython-39.pyc,, +cryptography/__pycache__/fernet.cpython-39.pyc,, +cryptography/__pycache__/utils.cpython-39.pyc,, +cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087 +cryptography/fernet.py,sha256=3Cvxkh0KJSbX8HbnCHu4wfCW7U0GgfUA3v_qQ8a8iWc,6963 +cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455 +cryptography/hazmat/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/__pycache__/_oid.cpython-39.pyc,, +cryptography/hazmat/_oid.py,sha256=p8ThjwJB56Ci_rAIrjyJ1f8VjgD6e39es2dh8JIUBOw,17240 +cryptography/hazmat/asn1/__init__.py,sha256=hS_EWx3wVvZzfbCcNV8hzcDnyMM8H-BhIoS1TipUosk,293 +cryptography/hazmat/asn1/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/asn1/__pycache__/asn1.cpython-39.pyc,, +cryptography/hazmat/asn1/asn1.py,sha256=eMEThEXa19LQjcyVofgHsW6tsZnjp3ddH7bWkkcxfLM,3860 +cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361 +cryptography/hazmat/backends/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305 +cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-39.pyc,, +cryptography/hazmat/backends/openssl/backend.py,sha256=tV5AxBoFJ2GfA0DMWSY-0TxQJrpQoexzI9R4Kybb--4,10215 +cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/bindings/_rust.abi3.so,sha256=SdKcBJDv5GCVufc6BxPiUa5F9WsXEsewQsKaBKAa5gQ,12797816 +cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=KhqLhXFPArPzzJ7DYO9Fl8FoXB_BagAd_r4Dm_Ze9Xo,1257 +cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230 +cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354 +cryptography/hazmat/bindings/_rust/declarative_asn1.pyi,sha256=2ECFmYue1EPkHEE2Bm7aLwkjB0mSUTpr23v9MN4pri4,892 +cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640 
+cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=VPVWuKHI9EMs09ZLRYAGvR0Iz0mCMmEzXAkgJHovpoM,4020 +cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=iOAMDyHoNwwCSZfZzuXDr64g4GpGUeDgEN-LjXqdrBM,1522 +cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=4Nddw6-ynzIB3w2W86WvkGKTLlTDk_6F5l54RHCuy3E,2688 +cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=LhPzHWSXJq4grAJXn6zSvSSdV-aYIIscHDwIPlJGGPs,1315 +cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564 +cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564 +cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299 +cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691 +cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=VXfXd5G6hUivg399R1DYdmW3eTb0EebzDTqjRC2gaRw,532 +cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=Yx49lqdnjsD7bxiDV1kcaMrDktug5evi5a6zerMiy2s,514 +cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=OWZvBx7xfo_HJl41Nc--DugVyCVPIprZ3HlOPTSWH9g,984 +cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=BXZn7NDjL3JAbYW0SQ8pg1iyC5DbQXVhUAiwsi8DFR8,702 +cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=xXfFBb9QehHfDtEaxV_65Z0YK7NquOVIChpTLkgAs_k,2029 +cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=teIt8M6ZEMJrn4s3W0UnW0DZ-30Jd68WnSsKKG124l0,912 +cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=_SW9NtQ5FDlAbdclFtWpT4lGmxKIKHpN-4j8J2BzYfQ,585 +cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364 +cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=ewn4GpQyb7zPwE-ni7GtyQgMC0A1mLuqYsSyqv6nI_s,523 +cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=juTZTmli8jO_5Vcufg-vHvx_tCyezmSLIh_9PU3TczI,505 +cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=vEEd5wDiZvb8ZGFaziLCaWLzAwoG_tvPUxLQw5_uOl8,1605 +cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=txGBJijqZshEcqra6byPNbnisIdlxzOSIHP2hl9arPs,1601 +cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=PPhld-WkO743iXFPebeG0LtgK0aTzGdjcIsay1Gm5GE,757 +cryptography/hazmat/bindings/_rust/x509.pyi,sha256=n9X0IQ6ICbdIi-ExdCFZoBgeY6njm3QOVAVZwDQdnbk,9784 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-39.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-39.pyc,, +cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DMOpA_XN4l70zTc5_J9DpwlbQeUBRTWpfIJ4yRIn1-U,5791 +cryptography/hazmat/bindings/openssl/binding.py,sha256=x8eocEmukO4cm7cHqfVmOoYY7CCXdoF1v1WhZQt9neo,4610 +cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-39.pyc,, +cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=YrKgHS4MfwWaMmPBYRymRRlC0phwWp9ycICFezeJPGk,2595 +cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 
+cryptography/hazmat/primitives/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/_serialization.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/cmac.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/constant_time.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/hashes.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/hmac.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/keywrap.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/padding.cpython-39.pyc,, +cryptography/hazmat/primitives/__pycache__/poly1305.cpython-39.pyc,, +cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532 +cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=Eh3i7lwedHfi0eLSsH93PZxQKzY9I6lkK67vL4V5tOc,1522 +cryptography/hazmat/primitives/_serialization.py,sha256=chgPCSF2jxI2Cr5gB-qbWXOvOfupBh4CARS0KAhv9AM,5123 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-39.pyc,, +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=0v_vEFFz5pQ1QG-FkWDyvgv7IfuVZSH5Q6LyFI5A8rg,3645 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=Ld_bbbqQFz12dObHxIkzEQzX0SWWP41RLSWkYSaKhqE,4213 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=dj0ZR_jTVI1wojjipjbXNVccPSIRObWxSZcTGQKGbHc,13437 +cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=jZW5cs472wXXV3eB0sE1b8w64gdazwwU0_MT5UOTiXs,3700 +cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=yAetgn2f2JYf0BO8MapGzXeThsvSMG5LmUCrxVOidAA,3729 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=vQ6l6gOg9HqcbOsvHrSiJRVLdEj9L4m4HkRGYziTyFA,2854 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=ZnKOo2f34MCCOupC03Y1uR-_jiSG5IrelHEmxaME3D4,8303 +cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790 +cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=_4nQeZ3yJ3Lg0RpXnaqA-1yt6vbx1F-wzLcaZHwSpeE,3613 +cryptography/hazmat/primitives/asymmetric/x448.py,sha256=WKBLtuVfJqiBRro654fGaQAlvsKbqbNkK7c4A_ZCdV0,3642 +cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680 +cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-39.pyc,, 
+cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-39.pyc,, +cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=Q7ZJwcsx83Mgxv5y7r6CyJKSdsOwC-my-5A67-ma2vw,3407 +cryptography/hazmat/primitives/ciphers/base.py,sha256=aBC7HHBBoixebmparVr0UlODs3VD0A7B6oz_AaRjDv8,4253 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=20stpwhDtbAvpH0SMf9EDHIciwmTF-JMBUOZ9bU8WiQ,8318 +cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338 +cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422 +cryptography/hazmat/primitives/hashes.py,sha256=M8BrlKB3U6DEtHvWTV5VRjpteHv1kS3Zxm_Bsk04cr8,5184 +cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750 +cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-39.pyc,, +cryptography/hazmat/primitives/kdf/argon2.py,sha256=UFDNXG0v-rw3DqAQTB1UQAsQC2M5Ejg0k_6OCyhLKus,460 +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=Ua8KoLXXnzgsrAUmHpyKymaPt8aPRP0EHEaBz7QCQ9I,3737 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=M0lAEfRoc4kpp4-nwDj9yB-vNZukIOYEQrUlWsBNn9o,543 +cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=oZepvo4evhKkkJQWRDwaPoIbyTaFmDc5NPimxg6lfKg,9165 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1WIwhELR0w8ztTpTu8BrFiYWmK3hUfJq08I79TxwieE,1957 +cryptography/hazmat/primitives/kdf/scrypt.py,sha256=XyWUdUUmhuI9V6TqAPOvujCSMGv1XQdg0a21IWCmO-U,590 +cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=zLTcF665QFvXX2f8TS7fmBZTteXpFjKahzfjjQcCJyw,1999 +cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650 +cryptography/hazmat/primitives/padding.py,sha256=QT-U-NvV2eQGO1wVPbDiNGNSc9keRDS-ig5cQOrLz0E,1865 +cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355 +cryptography/hazmat/primitives/serialization/__init__.py,sha256=Q7uTgDlt7n3WfsMT6jYwutC6DIg_7SEeoAm1GHZ5B5E,1705 +cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-39.pyc,, +cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615 +cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=mS9cFNG4afzvseoc5e1MWoY2VskfL8N8Y_OFjl67luY,5104 
+cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=5OR_Tkysxaprn4FegvJIfbep9rJ9wok6FLWvWwQ5-Mg,13943 +cryptography/hazmat/primitives/serialization/ssh.py,sha256=hPV5obFznz0QhFfXFPOeQ8y6MsurA0xVMQiLnLESEs8,53700 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258 +cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-39.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-39.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-39.pyc,, +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=ivZo5BrcCGWLsqql4nZV0XXCjyGPi_iHfDFltGlOJwk,3256 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=m5LPpRL00kp4zY8gTjr55Hfz9aMlPS53kHmVkSQCmdY,1652 +cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cryptography/utils.py,sha256=nFHkPQZycOQGeBtBRkWSA4WjOHFo7pwummQt-PPSkZc,4349 +cryptography/x509/__init__.py,sha256=xloN0swseNx-m2WFZmCA17gOoxQWqeU82UVjEdJBePQ,8257 +cryptography/x509/__pycache__/__init__.cpython-39.pyc,, +cryptography/x509/__pycache__/base.cpython-39.pyc,, +cryptography/x509/__pycache__/certificate_transparency.cpython-39.pyc,, +cryptography/x509/__pycache__/extensions.cpython-39.pyc,, +cryptography/x509/__pycache__/general_name.cpython-39.pyc,, +cryptography/x509/__pycache__/name.cpython-39.pyc,, +cryptography/x509/__pycache__/ocsp.cpython-39.pyc,, +cryptography/x509/__pycache__/oid.cpython-39.pyc,, +cryptography/x509/__pycache__/verification.cpython-39.pyc,, +cryptography/x509/base.py,sha256=OrmTw3y8B6AE_nGXQPN8x9kq-d7rDWeH13gCq6T6D6U,27997 +cryptography/x509/certificate_transparency.py,sha256=JqoOIDhlwInrYMFW6IFn77WJ0viF-PB_rlZV3vs9MYc,797 +cryptography/x509/extensions.py,sha256=QxYrqR6SF1qzR9ZraP8wDiIczlEVlAFuwDRVcltB6Tk,77724 +cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836 +cryptography/x509/name.py,sha256=ty0_xf0LnHwZAdEf-d8FLO1K4hGqx_7DsD3CHwoLJiY,15101 +cryptography/x509/ocsp.py,sha256=Yey6NdFV1MPjop24Mj_VenjEpg3kUaMopSWOK0AbeBs,12699 +cryptography/x509/oid.py,sha256=BUzgXXGVWilkBkdKPTm9R4qElE9gAGHgdYPMZAp7PJo,931 +cryptography/x509/verification.py,sha256=gR2C2c-XZQtblZhT5T5vjSKOtCb74ef2alPVmEcwFlM,958 diff --git a/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/WHEEL new file mode 100644 index 0000000..9774bb4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: maturin (1.9.4) +Root-Is-Purelib: false +Tag: cp38-abi3-manylinux_2_34_x86_64 + diff --git a/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE new file mode 100644 index 0000000..b11f379 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. 
diff --git a/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE.APACHE b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE.APACHE new file mode 100644 index 0000000..62589ed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE.APACHE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE.BSD b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE.BSD new file mode 100644 index 0000000..ec1a29d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography-46.0.5.dist-info/licenses/LICENSE.BSD @@ -0,0 +1,27 @@ +Copyright (c) Individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of PyCA Cryptography nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.9/site-packages/cryptography/__about__.py b/.venv/lib/python3.9/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..43b3024 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/__about__.py @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +__all__ = [ + "__author__", + "__copyright__", + "__version__", +] + +__version__ = "46.0.5" + + +__author__ = "The Python Cryptographic Authority and individual contributors" +__copyright__ = f"Copyright 2013-2025 {__author__}" diff --git a/.venv/lib/python3.9/site-packages/cryptography/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/__init__.py new file mode 100644 index 0000000..d374f75 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.__about__ import __author__, __copyright__, __version__ + +__all__ = [ + "__author__", + "__copyright__", + "__version__", +] diff --git a/.venv/lib/python3.9/site-packages/cryptography/__pycache__/__about__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/__about__.cpython-39.pyc new file mode 100644 index 0000000..08200aa Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/__about__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..7104f93 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/__pycache__/exceptions.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..789bcf9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/exceptions.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/__pycache__/fernet.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/fernet.cpython-39.pyc new file mode 100644 index 0000000..2fcba74 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/fernet.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/__pycache__/utils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..bbe3968 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/__pycache__/utils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/exceptions.py b/.venv/lib/python3.9/site-packages/cryptography/exceptions.py new file mode 100644 index 0000000..fe125ea --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/exceptions.py @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions + +if typing.TYPE_CHECKING: + from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +_Reasons = rust_exceptions._Reasons + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message: str, reason: _Reasons | None = None) -> None: + super().__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__( + self, msg: str, err_code: list[rust_openssl.OpenSSLError] + ) -> None: + super().__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass diff --git a/.venv/lib/python3.9/site-packages/cryptography/fernet.py b/.venv/lib/python3.9/site-packages/cryptography/fernet.py new file mode 100644 index 0000000..c6744ae --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/fernet.py @@ -0,0 +1,224 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import base64 +import binascii +import os +import time +import typing +from collections.abc import Iterable + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet: + def __init__( + self, + key: bytes | str, + backend: typing.Any = None, + ) -> None: + try: + key = base64.urlsafe_b64decode(key) + except binascii.Error as exc: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." + ) from exc + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." 
+ ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + + @classmethod + def generate_key(cls) -> bytes: + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data: bytes) -> bytes: + return self.encrypt_at_time(data, int(time.time())) + + def encrypt_at_time(self, data: bytes, current_time: int) -> bytes: + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts( + self, data: bytes, current_time: int, iv: bytes + ) -> bytes: + utils._check_bytes("data", data) + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), + modes.CBC(iv), + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + + current_time.to_bytes(length=8, byteorder="big") + + iv + + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(token) + if ttl is None: + time_info = None + else: + time_info = (ttl, int(time.time())) + return self._decrypt_data(data, timestamp, time_info) + + def decrypt_at_time( + self, token: bytes | str, ttl: int, current_time: int + ) -> bytes: + if ttl is None: + raise ValueError( + "decrypt_at_time() can only be used with a non-None ttl" + ) + timestamp, data = Fernet._get_unverified_token_data(token) + return self._decrypt_data(data, timestamp, (ttl, current_time)) + + def extract_timestamp(self, token: bytes | str) -> int: + timestamp, data = Fernet._get_unverified_token_data(token) + # Verify the token was not tampered with. 
+ self._verify_signature(data) + return timestamp + + @staticmethod + def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]: + if not isinstance(token, (str, bytes)): + raise TypeError("token must be bytes or str") + + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if not data or data[0] != 0x80: + raise InvalidToken + + if len(data) < 9: + raise InvalidToken + + timestamp = int.from_bytes(data[1:9], byteorder="big") + return timestamp, data + + def _verify_signature(self, data: bytes) -> None: + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + def _decrypt_data( + self, + data: bytes, + timestamp: int, + time_info: tuple[int, int] | None, + ) -> bytes: + if time_info is not None: + ttl, current_time = time_info + if timestamp + ttl < current_time: + raise InvalidToken + + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + + self._verify_signature(data) + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv) + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded + + +class MultiFernet: + def __init__(self, fernets: Iterable[Fernet]): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg: bytes) -> bytes: + return self.encrypt_at_time(msg, int(time.time())) + + def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes: + return self._fernets[0].encrypt_at_time(msg, current_time) + + def rotate(self, msg: bytes | str) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(msg) + for f in self._fernets: + try: + p = f._decrypt_data(data, timestamp, None) + break + except InvalidToken: + pass + else: + raise InvalidToken + + iv = os.urandom(16) + return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) + + def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes: + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: + pass + raise InvalidToken + + def decrypt_at_time( + self, msg: bytes | str, ttl: int, current_time: int + ) -> bytes: + for f in self._fernets: + try: + return f.decrypt_at_time(msg, ttl, current_time) + except InvalidToken: + pass + raise InvalidToken + + def extract_timestamp(self, msg: bytes | str) -> int: + for f in self._fernets: + try: + return f.extract_timestamp(msg) + except InvalidToken: + pass + raise InvalidToken diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..b9f1187 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +""" +Hazardous Materials + +This is a "Hazardous Materials" module. 
You should ONLY use it if you're +100% absolutely sure that you know what you're doing because this module +is full of land mines, dragons, and dinosaurs with laser guns. +""" diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e8977eb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-39.pyc new file mode 100644 index 0000000..f873f8b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/_oid.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/_oid.py new file mode 100644 index 0000000..4bf138d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/_oid.py @@ -0,0 +1,356 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import ( + ObjectIdentifier as ObjectIdentifier, +) +from cryptography.hazmat.primitives import hashes + + +class ExtensionOID: + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + PRIVATE_KEY_USAGE_PERIOD = ObjectIdentifier("2.5.29.16") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") + CRL_NUMBER = ObjectIdentifier("2.5.29.20") + DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") + PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier( + "1.3.6.1.4.1.11129.2.4.2" + ) + PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") + SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5") + MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7") + ADMISSIONS = ObjectIdentifier("1.3.36.8.3.3") + + +class OCSPExtensionOID: + NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") + ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4") + + +class CRLEntryExtensionOID: + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +class NameOID: + 
COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + STREET_ADDRESS = ObjectIdentifier("2.5.4.9") + ORGANIZATION_IDENTIFIER = ObjectIdentifier("2.5.4.97") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + INITIALS = ObjectIdentifier("2.5.4.43") + GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") + X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") + DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") + JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") + JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( + "1.3.6.1.4.1.311.60.2.1.2" + ) + BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") + POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") + POSTAL_CODE = ObjectIdentifier("2.5.4.17") + INN = ObjectIdentifier("1.2.643.3.131.1.1") + OGRN = ObjectIdentifier("1.2.643.100.1") + SNILS = ObjectIdentifier("1.2.643.100.3") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +class SignatureAlgorithmOID: + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") + # This is an alternate OID for RSA with SHA1 that is occasionally seen + _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13") + RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14") + RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15") + RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9") + ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10") + ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11") + ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3") + DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3") + GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2") + 
GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3") + + +_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = { + SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(), + SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(), + SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(), + SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(), + SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ED25519: None, + SignatureAlgorithmOID.ED448: None, + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None, +} + + +class HashAlgorithmOID: + SHA1 = ObjectIdentifier("1.3.14.3.2.26") + SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.2.4") + SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.2.1") + SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.2.2") + SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.2.3") + SHA3_224 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.224") + SHA3_256 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.256") + SHA3_384 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.384") + SHA3_512 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.512") + SHA3_224_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.7") + SHA3_256_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.8") + SHA3_384_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.9") + SHA3_512_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.10") + + +class PublicKeyAlgorithmOID: + DSA = ObjectIdentifier("1.2.840.10040.4.1") + EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1") + RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + X25519 = ObjectIdentifier("1.3.101.110") + X448 = ObjectIdentifier("1.3.101.111") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + + +class ExtendedKeyUsageOID: + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") + SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2") + KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5") 
+ IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17") + BUNDLE_SECURITY = ObjectIdentifier("1.3.6.1.5.5.7.3.35") + CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4") + + +class OtherNameFormOID: + PERMANENT_IDENTIFIER = ObjectIdentifier("1.3.6.1.5.5.7.8.3") + HW_MODULE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.4") + DNS_SRV = ObjectIdentifier("1.3.6.1.5.5.7.8.7") + NAI_REALM = ObjectIdentifier("1.3.6.1.5.5.7.8.8") + SMTP_UTF8_MAILBOX = ObjectIdentifier("1.3.6.1.5.5.7.8.9") + ACP_NODE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.10") + BUNDLE_EID = ObjectIdentifier("1.3.6.1.5.5.7.8.11") + + +class AuthorityInformationAccessOID: + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class SubjectInformationAccessOID: + CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5") + + +class CertificatePoliciesOID: + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + + +class AttributeOID: + CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.STREET_ADDRESS: "streetAddress", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: "surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.USER_ID: "userID", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", + NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", + NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( + "jurisdictionStateOrProvinceName" + ), + NameOID.BUSINESS_CATEGORY: "businessCategory", + NameOID.POSTAL_ADDRESS: "postalAddress", + NameOID.POSTAL_CODE: "postalCode", + NameOID.INN: "INN", + NameOID.OGRN: "OGRN", + NameOID.SNILS: "SNILS", + NameOID.UNSTRUCTURED_NAME: "unstructuredName", + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.RSASSA_PSS: "rsassaPss", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + SignatureAlgorithmOID.ED25519: "ed25519", + SignatureAlgorithmOID.ED448: "ed448", + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: ( + "GOST R 34.11-94 with 
GOST R 34.10-2001" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)" + ), + HashAlgorithmOID.SHA1: "sha1", + HashAlgorithmOID.SHA224: "sha224", + HashAlgorithmOID.SHA256: "sha256", + HashAlgorithmOID.SHA384: "sha384", + HashAlgorithmOID.SHA512: "sha512", + HashAlgorithmOID.SHA3_224: "sha3_224", + HashAlgorithmOID.SHA3_256: "sha3_256", + HashAlgorithmOID.SHA3_384: "sha3_384", + HashAlgorithmOID.SHA3_512: "sha3_512", + HashAlgorithmOID.SHA3_224_NIST: "sha3_224", + HashAlgorithmOID.SHA3_256_NIST: "sha3_256", + HashAlgorithmOID.SHA3_384_NIST: "sha3_384", + HashAlgorithmOID.SHA3_512_NIST: "sha3_512", + PublicKeyAlgorithmOID.DSA: "dsaEncryption", + PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey", + PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption", + PublicKeyAlgorithmOID.X25519: "X25519", + PublicKeyAlgorithmOID.X448: "X448", + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin", + ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.PRIVATE_KEY_USAGE_PERIOD: "privateKeyUsagePeriod", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.PRECERT_POISON: "ctPoison", + ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate", + ExtensionOID.ADMISSIONS: "Admissions", + CRLEntryExtensionOID.CRL_REASON: "cRLReason", + CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint", + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + ExtensionOID.CRL_NUMBER: "cRLNumber", + ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", + ExtensionOID.TLS_FEATURE: "TLSFeature", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + SubjectInformationAccessOID.CA_REPOSITORY: "caRepository", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", + 
OCSPExtensionOID.NONCE: "OCSPNonce", + AttributeOID.CHALLENGE_PASSWORD: "challengePassword", +} diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__init__.py new file mode 100644 index 0000000..be68373 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.asn1.asn1 import encode_der, sequence + +__all__ = [ + "encode_der", + "sequence", +] diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..1a00aa8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-39.pyc new file mode 100644 index 0000000..3c5ea01 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/asn1.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/asn1.py new file mode 100644 index 0000000..dedad6f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/asn1/asn1.py @@ -0,0 +1,116 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import dataclasses +import sys +import typing + +if sys.version_info < (3, 11): + import typing_extensions + + # We use the `include_extras` parameter of `get_type_hints`, which was + # added in Python 3.9. This can be replaced by the `typing` version + # once the min version is >= 3.9 + if sys.version_info < (3, 9): + get_type_hints = typing_extensions.get_type_hints + else: + get_type_hints = typing.get_type_hints +else: + get_type_hints = typing.get_type_hints + +from cryptography.hazmat.bindings._rust import declarative_asn1 + +T = typing.TypeVar("T", covariant=True) +U = typing.TypeVar("U") + + +encode_der = declarative_asn1.encode_der + + +def _normalize_field_type( + field_type: typing.Any, field_name: str +) -> declarative_asn1.AnnotatedType: + annotation = declarative_asn1.Annotation() + + if hasattr(field_type, "__asn1_root__"): + annotated_root = field_type.__asn1_root__ + if not isinstance(annotated_root, declarative_asn1.AnnotatedType): + raise TypeError(f"unsupported root type: {annotated_root}") + return annotated_root + else: + rust_field_type = declarative_asn1.non_root_python_to_rust(field_type) + + return declarative_asn1.AnnotatedType(rust_field_type, annotation) + + +def _annotate_fields( + raw_fields: dict[str, type], +) -> dict[str, declarative_asn1.AnnotatedType]: + fields = {} + for field_name, field_type in raw_fields.items(): + # Recursively normalize the field type into something that the + # Rust code can understand. 
+ annotated_field_type = _normalize_field_type(field_type, field_name) + fields[field_name] = annotated_field_type + + return fields + + +def _register_asn1_sequence(cls: type[U]) -> None: + raw_fields = get_type_hints(cls, include_extras=True) + root = declarative_asn1.AnnotatedType( + declarative_asn1.Type.Sequence(cls, _annotate_fields(raw_fields)), + declarative_asn1.Annotation(), + ) + + setattr(cls, "__asn1_root__", root) + + +# Due to https://github.com/python/mypy/issues/19731, we can't define an alias +# for `dataclass_transform` that conditionally points to `typing` or +# `typing_extensions` depending on the Python version (like we do for +# `get_type_hints`). +# We work around it by making the whole decorated class conditional on the +# Python version. +if sys.version_info < (3, 11): + + @typing_extensions.dataclass_transform(kw_only_default=True) + def sequence(cls: type[U]) -> type[U]: + # We use `dataclasses.dataclass` to add an __init__ method + # to the class with keyword-only parameters. + if sys.version_info >= (3, 10): + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + # `match_args` was added in Python 3.10 and defaults + # to True + match_args=False, + # `kw_only` was added in Python 3.10 and defaults to + # False + kw_only=True, + )(cls) + else: + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + )(cls) + _register_asn1_sequence(dataclass_cls) + return dataclass_cls + +else: + + @typing.dataclass_transform(kw_only_default=True) + def sequence(cls: type[U]) -> type[U]: + # Only add an __init__ method, with keyword-only + # parameters. + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + match_args=False, + kw_only=True, + )(cls) + _register_asn1_sequence(dataclass_cls) + return dataclass_cls diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 0000000..b4400aa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from typing import Any + + +def default_backend() -> Any: + from cryptography.hazmat.backends.openssl.backend import backend + + return backend diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..8a5db2f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..51b0447 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.backends.openssl.backend import backend + +__all__ = ["backend"] diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..87a07e6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-39.pyc new file mode 100644 index 0000000..8588565 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/backend.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..248b8c5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,302 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, + OAEP, + PSS, + PKCS1v15, +) +from cryptography.hazmat.primitives.ciphers import ( + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, + Mode, +) + + +class Backend: + """ + OpenSSL API binding interfaces. + """ + + name = "openssl" + + # TripleDES encryption is disallowed/deprecated throughout 2023 in + # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA). + _fips_ciphers = (AES,) + # Sometimes SHA1 is still permissible. That logic is contained + # within the various *_supported methods. 
+ _fips_hashes = ( + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + hashes.SHA512_224, + hashes.SHA512_256, + hashes.SHA3_224, + hashes.SHA3_256, + hashes.SHA3_384, + hashes.SHA3_512, + hashes.SHAKE128, + hashes.SHAKE256, + ) + _fips_ecdh_curves = ( + ec.SECP224R1, + ec.SECP256R1, + ec.SECP384R1, + ec.SECP521R1, + ) + _fips_rsa_min_key_size = 2048 + _fips_rsa_min_public_exponent = 65537 + _fips_dsa_min_modulus = 1 << 2048 + _fips_dh_min_key_size = 2048 + _fips_dh_min_modulus = 1 << _fips_dh_min_key_size + + def __init__(self) -> None: + self._binding = binding.Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + self._fips_enabled = rust_openssl.is_fips_enabled() + + def __repr__(self) -> str: + return ( + f"<OpenSSLBackend(version: {self.openssl_version_text()}, " + f"FIPS: {self._fips_enabled}, " + f"Legacy: {rust_openssl._legacy_provider_loaded})>" + ) + + def openssl_assert(self, ok: bool) -> None: + return binding._openssl_assert(ok) + + def _enable_fips(self) -> None: + # This function enables FIPS mode for OpenSSL 3.0.0 on installs that + # have the FIPS provider installed properly. + rust_openssl.enable_fips(rust_openssl._providers) + assert rust_openssl.is_fips_enabled() + self._fips_enabled = rust_openssl.is_fips_enabled() + + def openssl_version_text(self) -> str: + """ + Friendly string name of the loaded OpenSSL library. This is not + necessarily the same version as it was compiled against. + + Example: OpenSSL 3.2.1 30 Jan 2024 + """ + return rust_openssl.openssl_version_text() + + def openssl_version_number(self) -> int: + return rust_openssl.openssl_version() + + def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if self._fips_enabled and not isinstance(algorithm, self._fips_hashes): + return False + + return rust_openssl.hashes.hash_supported(algorithm) + + def signature_hash_supported( + self, algorithm: hashes.HashAlgorithm + ) -> bool: + # Dedicated check for hashing algorithm use in message digest for + # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption). + if self._fips_enabled and isinstance(algorithm, hashes.SHA1): + return False + return self.hash_supported(algorithm) + + def scrypt_supported(self) -> bool: + if self._fips_enabled: + return False + else: + return hasattr(rust_openssl.kdf.Scrypt, "derive") + + def argon2_supported(self) -> bool: + if self._fips_enabled: + return False + else: + return hasattr(rust_openssl.kdf.Argon2id, "derive") + + def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + # FIPS mode still allows SHA1 for HMAC + if self._fips_enabled and isinstance(algorithm, hashes.SHA1): + return True + if rust_openssl.CRYPTOGRAPHY_IS_AWSLC: + return isinstance( + algorithm, + ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + hashes.SHA512_224, + hashes.SHA512_256, + ), + ) + return self.hash_supported(algorithm) + + def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool: + if self._fips_enabled: + # FIPS mode requires AES. TripleDES is disallowed/deprecated in + # FIPS 140-3.
+ if not isinstance(cipher, self._fips_ciphers): + return False + + return rust_openssl.ciphers.cipher_supported(cipher, mode) + + def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + return self.hmac_supported(algorithm) + + def _consume_errors(self) -> list[rust_openssl.OpenSSLError]: + return rust_openssl.capture_error_stack() + + def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if self._fips_enabled and isinstance(algorithm, hashes.SHA1): + return False + + return isinstance( + algorithm, + ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ) + + def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool: + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + # FIPS 186-4 only allows salt length == digest length for PSS + # It is technically acceptable to set an explicit salt length + # equal to the digest length and this will incorrectly fail, but + # since we don't do that in the tests and this method is + # private, we'll ignore that until we need to do otherwise. + if ( + self._fips_enabled + and padding._salt_length != PSS.DIGEST_LENGTH + ): + return False + return self.hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return self._oaep_hash_supported( + padding._mgf._algorithm + ) and self._oaep_hash_supported(padding._algorithm) + else: + return False + + def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool: + if self._fips_enabled and isinstance(padding, PKCS1v15): + return False + else: + return self.rsa_padding_supported(padding) + + def dsa_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not self._fips_enabled + ) + + def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if not self.dsa_supported(): + return False + return self.signature_hash_supported(algorithm) + + def cmac_algorithm_supported(self, algorithm) -> bool: + return self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + + def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool: + if self._fips_enabled and not isinstance( + curve, self._fips_ecdh_curves + ): + return False + + return rust_openssl.ec.curve_supported(curve) + + def elliptic_curve_signature_algorithm_supported( + self, + signature_algorithm: ec.EllipticCurveSignatureAlgorithm, + curve: ec.EllipticCurve, + ) -> bool: + # We only support ECDSA right now. 
+ if not isinstance(signature_algorithm, ec.ECDSA): + return False + + return self.elliptic_curve_supported(curve) and ( + isinstance(signature_algorithm.algorithm, asym_utils.Prehashed) + or self.hash_supported(signature_algorithm.algorithm) + ) + + def elliptic_curve_exchange_algorithm_supported( + self, algorithm: ec.ECDH, curve: ec.EllipticCurve + ) -> bool: + return self.elliptic_curve_supported(curve) and isinstance( + algorithm, ec.ECDH + ) + + def dh_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def dh_x942_serialization_supported(self) -> bool: + return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 + + def x25519_supported(self) -> bool: + return not self._fips_enabled + + def x448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def ed25519_supported(self) -> bool: + return not self._fips_enabled + + def ed448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def ecdsa_deterministic_supported(self) -> bool: + return ( + rust_openssl.CRYPTOGRAPHY_OPENSSL_320_OR_GREATER + and not self._fips_enabled + ) + + def poly1305_supported(self) -> bool: + return not self._fips_enabled + + def pkcs7_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + +backend = Backend() diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..4a57b62 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust.abi3.so new file mode 100755 index 0000000..098a908 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust.abi3.so differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi new file mode 100644 index 0000000..2f4eef4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +import typing + +from cryptography.hazmat.primitives import padding +from cryptography.utils import Buffer + +class PKCS7PaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ANSIX923PaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class PKCS7UnpaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ANSIX923UnpaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ObjectIdentifier: + def __init__(self, value: str) -> None: ... + @property + def dotted_string(self) -> str: ... + @property + def _name(self) -> str: ... + +T = typing.TypeVar("T") diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi new file mode 100644 index 0000000..8010008 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi @@ -0,0 +1,8 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +lib = typing.Any +ffi = typing.Any diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi new file mode 100644 index 0000000..3b5f208 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi @@ -0,0 +1,7 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +def decode_dss_signature(signature: bytes) -> tuple[int, int]: ... +def encode_dss_signature(r: int, s: int) -> bytes: ... +def parse_spki_for_data(data: bytes) -> bytes: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi new file mode 100644 index 0000000..8563c11 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi @@ -0,0 +1,32 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +import typing + +def encode_der(value: typing.Any) -> bytes: ... +def non_root_python_to_rust(cls: type) -> Type: ... + +# Type is a Rust enum with tuple variants. For now, we express the type +# annotations like this: +class Type: + Sequence: typing.ClassVar[type] + PyInt: typing.ClassVar[type] + +class Annotation: + def __new__( + cls, + ) -> Annotation: ... + +class AnnotatedType: + inner: Type + annotation: Annotation + + def __new__(cls, inner: Type, annotation: Annotation) -> AnnotatedType: ... 
+ +class AnnotatedTypeObject: + annotated_type: AnnotatedType + value: typing.Any + + def __new__( + cls, annotated_type: AnnotatedType, value: typing.Any + ) -> AnnotatedTypeObject: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi new file mode 100644 index 0000000..09f46b1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +class _Reasons: + BACKEND_MISSING_INTERFACE: _Reasons + UNSUPPORTED_HASH: _Reasons + UNSUPPORTED_CIPHER: _Reasons + UNSUPPORTED_PADDING: _Reasons + UNSUPPORTED_MGF: _Reasons + UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons + UNSUPPORTED_ELLIPTIC_CURVE: _Reasons + UNSUPPORTED_SERIALIZATION: _Reasons + UNSUPPORTED_X509: _Reasons + UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons + UNSUPPORTED_DIFFIE_HELLMAN: _Reasons + UNSUPPORTED_MAC: _Reasons diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi new file mode 100644 index 0000000..103e96c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi @@ -0,0 +1,117 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import datetime +from collections.abc import Iterator + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes +from cryptography.x509 import ocsp + +class OCSPRequest: + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + @property + def extensions(self) -> x509.Extensions: ... + +class OCSPResponse: + @property + def responses(self) -> Iterator[OCSPSingleResponse]: ... + @property + def response_status(self) -> ocsp.OCSPResponseStatus: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_response_bytes(self) -> bytes: ... + @property + def certificates(self) -> list[x509.Certificate]: ... + @property + def responder_key_hash(self) -> bytes | None: ... + @property + def responder_name(self) -> x509.Name | None: ... + @property + def produced_at(self) -> datetime.datetime: ... + @property + def produced_at_utc(self) -> datetime.datetime: ... + @property + def certificate_status(self) -> ocsp.OCSPCertStatus: ... + @property + def revocation_time(self) -> datetime.datetime | None: ... + @property + def revocation_time_utc(self) -> datetime.datetime | None: ... + @property + def revocation_reason(self) -> x509.ReasonFlags | None: ... + @property + def this_update(self) -> datetime.datetime: ... + @property + def this_update_utc(self) -> datetime.datetime: ... 
+ @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def single_extensions(self) -> x509.Extensions: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + +class OCSPSingleResponse: + @property + def certificate_status(self) -> ocsp.OCSPCertStatus: ... + @property + def revocation_time(self) -> datetime.datetime | None: ... + @property + def revocation_time_utc(self) -> datetime.datetime | None: ... + @property + def revocation_reason(self) -> x509.ReasonFlags | None: ... + @property + def this_update(self) -> datetime.datetime: ... + @property + def this_update_utc(self) -> datetime.datetime: ... + @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + +def load_der_ocsp_request(data: bytes) -> ocsp.OCSPRequest: ... +def load_der_ocsp_response(data: bytes) -> ocsp.OCSPResponse: ... +def create_ocsp_request( + builder: ocsp.OCSPRequestBuilder, +) -> ocsp.OCSPRequest: ... +def create_ocsp_response( + status: ocsp.OCSPResponseStatus, + builder: ocsp.OCSPResponseBuilder | None, + private_key: PrivateKeyTypes | None, + hash_algorithm: hashes.HashAlgorithm | None, +) -> ocsp.OCSPResponse: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi new file mode 100644 index 0000000..5fb3cb2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi @@ -0,0 +1,75 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.bindings._rust.openssl import ( + aead, + ciphers, + cmac, + dh, + dsa, + ec, + ed448, + ed25519, + hashes, + hmac, + kdf, + keys, + poly1305, + rsa, + x448, + x25519, +) + +__all__ = [ + "aead", + "ciphers", + "cmac", + "dh", + "dsa", + "ec", + "ed448", + "ed25519", + "hashes", + "hmac", + "kdf", + "keys", + "openssl_version", + "openssl_version_text", + "poly1305", + "raise_openssl_error", + "rsa", + "x448", + "x25519", +] + +CRYPTOGRAPHY_IS_LIBRESSL: bool +CRYPTOGRAPHY_IS_BORINGSSL: bool +CRYPTOGRAPHY_IS_AWSLC: bool +CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_309_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_320_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_330_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_350_OR_GREATER: bool + +class Providers: ... + +_legacy_provider_loaded: bool +_providers: Providers + +def openssl_version() -> int: ... +def openssl_version_text() -> str: ... +def raise_openssl_error() -> typing.NoReturn: ... +def capture_error_stack() -> list[OpenSSLError]: ... +def is_fips_enabled() -> bool: ... +def enable_fips(providers: Providers) -> None: ... 
+ +class OpenSSLError: + @property + def lib(self) -> int: ... + @property + def reason(self) -> int: ... + @property + def reason_text(self) -> bytes: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi new file mode 100644 index 0000000..831fcd1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi @@ -0,0 +1,107 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from collections.abc import Sequence + +from cryptography.utils import Buffer + +class AESGCM: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class ChaCha20Poly1305: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key() -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESCCM: + def __init__(self, key: Buffer, tag_length: int = 16) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESSIV: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + data: Buffer, + associated_data: Sequence[Buffer] | None, + ) -> bytes: ... + def decrypt( + self, + data: Buffer, + associated_data: Sequence[Buffer] | None, + ) -> bytes: ... + +class AESOCB3: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESGCMSIV: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi new file mode 100644 index 0000000..a48fb01 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi @@ -0,0 +1,38 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
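
A minimal usage sketch for the AEAD stubs above, written against the public cryptography.hazmat.primitives.ciphers.aead wrapper rather than the private _rust binding; key size and messages are illustrative:

import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aesgcm = AESGCM(key)
nonce = os.urandom(12)  # 96-bit nonce, the recommended size for GCM
ct = aesgcm.encrypt(nonce, b"secret message", b"associated data")
assert aesgcm.decrypt(nonce, ct, b"associated data") == b"secret message"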
+ +import typing + +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import modes + +@typing.overload +def create_encryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag +) -> ciphers.AEADEncryptionContext: ... +@typing.overload +def create_encryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None +) -> ciphers.CipherContext: ... +@typing.overload +def create_decryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag +) -> ciphers.AEADDecryptionContext: ... +@typing.overload +def create_decryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None +) -> ciphers.CipherContext: ... +def cipher_supported( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode +) -> bool: ... +def _advance( + ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int +) -> None: ... +def _advance_aad( + ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int +) -> None: ... + +class CipherContext: ... +class AEADEncryptionContext: ... +class AEADDecryptionContext: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi new file mode 100644 index 0000000..9c03508 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi @@ -0,0 +1,18 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import ciphers + +class CMAC: + def __init__( + self, + algorithm: ciphers.BlockCipherAlgorithm, + backend: typing.Any = None, + ) -> None: ... + def update(self, data: bytes) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, signature: bytes) -> None: ... + def copy(self) -> CMAC: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi new file mode 100644 index 0000000..08733d7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi @@ -0,0 +1,51 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import dh + +MIN_MODULUS_SIZE: int + +class DHPrivateKey: ... +class DHPublicKey: ... +class DHParameters: ... + +class DHPrivateNumbers: + def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ... + def private_key(self, backend: typing.Any = None) -> dh.DHPrivateKey: ... + @property + def x(self) -> int: ... + @property + def public_numbers(self) -> DHPublicNumbers: ... + +class DHPublicNumbers: + def __init__( + self, y: int, parameter_numbers: DHParameterNumbers + ) -> None: ... + def public_key(self, backend: typing.Any = None) -> dh.DHPublicKey: ... + @property + def y(self) -> int: ... + @property + def parameter_numbers(self) -> DHParameterNumbers: ... + +class DHParameterNumbers: + def __init__(self, p: int, g: int, q: int | None = None) -> None: ... + def parameters(self, backend: typing.Any = None) -> dh.DHParameters: ... + @property + def p(self) -> int: ... 
+ @property + def g(self) -> int: ... + @property + def q(self) -> int | None: ... + +def generate_parameters( + generator: int, key_size: int, backend: typing.Any = None +) -> dh.DHParameters: ... +def from_pem_parameters( + data: bytes, backend: typing.Any = None +) -> dh.DHParameters: ... +def from_der_parameters( + data: bytes, backend: typing.Any = None +) -> dh.DHParameters: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi new file mode 100644 index 0000000..0922a4c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi @@ -0,0 +1,41 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import dsa + +class DSAPrivateKey: ... +class DSAPublicKey: ... +class DSAParameters: ... + +class DSAPrivateNumbers: + def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ... + @property + def x(self) -> int: ... + @property + def public_numbers(self) -> DSAPublicNumbers: ... + def private_key(self, backend: typing.Any = None) -> dsa.DSAPrivateKey: ... + +class DSAPublicNumbers: + def __init__( + self, y: int, parameter_numbers: DSAParameterNumbers + ) -> None: ... + @property + def y(self) -> int: ... + @property + def parameter_numbers(self) -> DSAParameterNumbers: ... + def public_key(self, backend: typing.Any = None) -> dsa.DSAPublicKey: ... + +class DSAParameterNumbers: + def __init__(self, p: int, q: int, g: int) -> None: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def g(self) -> int: ... + def parameters(self, backend: typing.Any = None) -> dsa.DSAParameters: ... + +def generate_parameters(key_size: int) -> dsa.DSAParameters: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi new file mode 100644 index 0000000..5c3b7bf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import ec + +class ECPrivateKey: ... +class ECPublicKey: ... + +class EllipticCurvePrivateNumbers: + def __init__( + self, private_value: int, public_numbers: EllipticCurvePublicNumbers + ) -> None: ... + def private_key( + self, backend: typing.Any = None + ) -> ec.EllipticCurvePrivateKey: ... + @property + def private_value(self) -> int: ... + @property + def public_numbers(self) -> EllipticCurvePublicNumbers: ... + +class EllipticCurvePublicNumbers: + def __init__(self, x: int, y: int, curve: ec.EllipticCurve) -> None: ... + def public_key( + self, backend: typing.Any = None + ) -> ec.EllipticCurvePublicKey: ... + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + @property + def curve(self) -> ec.EllipticCurve: ... + def __eq__(self, other: object) -> bool: ... + +def curve_supported(curve: ec.EllipticCurve) -> bool: ... 
+def generate_private_key( + curve: ec.EllipticCurve, backend: typing.Any = None +) -> ec.EllipticCurvePrivateKey: ... +def from_private_numbers( + numbers: ec.EllipticCurvePrivateNumbers, +) -> ec.EllipticCurvePrivateKey: ... +def from_public_numbers( + numbers: ec.EllipticCurvePublicNumbers, +) -> ec.EllipticCurvePublicKey: ... +def from_public_bytes( + curve: ec.EllipticCurve, data: bytes +) -> ec.EllipticCurvePublicKey: ... +def derive_private_key( + private_value: int, curve: ec.EllipticCurve +) -> ec.EllipticCurvePrivateKey: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi new file mode 100644 index 0000000..f85b3d1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import ed25519 +from cryptography.utils import Buffer + +class Ed25519PrivateKey: ... +class Ed25519PublicKey: ... + +def generate_key() -> ed25519.Ed25519PrivateKey: ... +def from_private_bytes(data: Buffer) -> ed25519.Ed25519PrivateKey: ... +def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi new file mode 100644 index 0000000..c8ca0ec --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import ed448 +from cryptography.utils import Buffer + +class Ed448PrivateKey: ... +class Ed448PublicKey: ... + +def generate_key() -> ed448.Ed448PrivateKey: ... +def from_private_bytes(data: Buffer) -> ed448.Ed448PrivateKey: ... +def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi new file mode 100644 index 0000000..6bfd295 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi @@ -0,0 +1,28 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes +from cryptography.utils import Buffer + +class Hash(hashes.HashContext): + def __init__( + self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None + ) -> None: ... + @property + def algorithm(self) -> hashes.HashAlgorithm: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def copy(self) -> Hash: ... + +def hash_supported(algorithm: hashes.HashAlgorithm) -> bool: ... + +class XOFHash: + def __init__(self, algorithm: hashes.ExtendableOutputFunction) -> None: ... + @property + def algorithm(self) -> hashes.ExtendableOutputFunction: ... 
+ def update(self, data: Buffer) -> None: ... + def squeeze(self, length: int) -> bytes: ... + def copy(self) -> XOFHash: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi new file mode 100644 index 0000000..3883d1b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi @@ -0,0 +1,22 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes +from cryptography.utils import Buffer + +class HMAC(hashes.HashContext): + def __init__( + self, + key: Buffer, + algorithm: hashes.HashAlgorithm, + backend: typing.Any = None, + ) -> None: ... + @property + def algorithm(self) -> hashes.HashAlgorithm: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, signature: bytes) -> None: ... + def copy(self) -> HMAC: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi new file mode 100644 index 0000000..9e2d8d9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi @@ -0,0 +1,72 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.hashes import HashAlgorithm +from cryptography.utils import Buffer + +def derive_pbkdf2_hmac( + key_material: Buffer, + algorithm: HashAlgorithm, + salt: bytes, + iterations: int, + length: int, +) -> bytes: ... + +class Scrypt: + def __init__( + self, + salt: bytes, + length: int, + n: int, + r: int, + p: int, + backend: typing.Any = None, + ) -> None: ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + +class Argon2id: + def __init__( + self, + *, + salt: bytes, + length: int, + iterations: int, + lanes: int, + memory_cost: int, + ad: bytes | None = None, + secret: bytes | None = None, + ) -> None: ... + def derive(self, key_material: bytes) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + def derive_phc_encoded(self, key_material: bytes) -> str: ... + @classmethod + def verify_phc_encoded( + cls, key_material: bytes, phc_encoded: str, secret: bytes | None = None + ) -> None: ... + +class HKDF: + def __init__( + self, + algorithm: HashAlgorithm, + length: int, + salt: bytes | None, + info: bytes | None, + backend: typing.Any = None, + ): ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + +class HKDFExpand: + def __init__( + self, + algorithm: HashAlgorithm, + length: int, + info: bytes | None, + backend: typing.Any = None, + ): ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... 
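
The KDF stubs above back the public key-derivation classes. A minimal sketch using the PBKDF2 wrapper (which ultimately calls derive_pbkdf2_hmac); the iteration count and output length are illustrative choices, not values taken from this diff:

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=600_000)
key = kdf.derive(b"correct horse battery staple")
# KDF instances are single-use; build a fresh one to verify the derived key.
PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=600_000).verify(
    b"correct horse battery staple", key
)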
diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi new file mode 100644 index 0000000..404057e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi @@ -0,0 +1,34 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric.types import ( + PrivateKeyTypes, + PublicKeyTypes, +) +from cryptography.utils import Buffer + +def load_der_private_key( + data: Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> PrivateKeyTypes: ... +def load_pem_private_key( + data: Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> PrivateKeyTypes: ... +def load_der_public_key( + data: bytes, + backend: typing.Any = None, +) -> PublicKeyTypes: ... +def load_pem_public_key( + data: bytes, + backend: typing.Any = None, +) -> PublicKeyTypes: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi new file mode 100644 index 0000000..45a2a39 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.utils import Buffer + +class Poly1305: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_tag(key: Buffer, data: Buffer) -> bytes: ... + @staticmethod + def verify_tag(key: Buffer, data: Buffer, tag: bytes) -> None: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, tag: bytes) -> None: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi new file mode 100644 index 0000000..ef7752d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import rsa + +class RSAPrivateKey: ... +class RSAPublicKey: ... + +class RSAPrivateNumbers: + def __init__( + self, + p: int, + q: int, + d: int, + dmp1: int, + dmq1: int, + iqmp: int, + public_numbers: RSAPublicNumbers, + ) -> None: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def d(self) -> int: ... + @property + def dmp1(self) -> int: ... + @property + def dmq1(self) -> int: ... + @property + def iqmp(self) -> int: ... + @property + def public_numbers(self) -> RSAPublicNumbers: ... + def private_key( + self, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, + ) -> rsa.RSAPrivateKey: ... + +class RSAPublicNumbers: + def __init__(self, e: int, n: int) -> None: ... 
+ @property + def n(self) -> int: ... + @property + def e(self) -> int: ... + def public_key(self, backend: typing.Any = None) -> rsa.RSAPublicKey: ... + +def generate_private_key( + public_exponent: int, + key_size: int, +) -> rsa.RSAPrivateKey: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi new file mode 100644 index 0000000..38d2add --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import x25519 +from cryptography.utils import Buffer + +class X25519PrivateKey: ... +class X25519PublicKey: ... + +def generate_key() -> x25519.X25519PrivateKey: ... +def from_private_bytes(data: Buffer) -> x25519.X25519PrivateKey: ... +def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi new file mode 100644 index 0000000..3ac0980 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import x448 +from cryptography.utils import Buffer + +class X448PrivateKey: ... +class X448PublicKey: ... + +def generate_key() -> x448.X448PrivateKey: ... +def from_private_bytes(data: Buffer) -> x448.X448PrivateKey: ... +def from_public_bytes(data: bytes) -> x448.X448PublicKey: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi new file mode 100644 index 0000000..b25becb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes +from cryptography.hazmat.primitives.serialization import ( + KeySerializationEncryption, +) +from cryptography.hazmat.primitives.serialization.pkcs12 import ( + PKCS12KeyAndCertificates, + PKCS12PrivateKeyTypes, +) +from cryptography.utils import Buffer + +class PKCS12Certificate: + def __init__( + self, cert: x509.Certificate, friendly_name: bytes | None + ) -> None: ... + @property + def friendly_name(self) -> bytes | None: ... + @property + def certificate(self) -> x509.Certificate: ... + +def load_key_and_certificates( + data: Buffer, + password: Buffer | None, + backend: typing.Any = None, +) -> tuple[ + PrivateKeyTypes | None, + x509.Certificate | None, + list[x509.Certificate], +]: ... +def load_pkcs12( + data: bytes, + password: bytes | None, + backend: typing.Any = None, +) -> PKCS12KeyAndCertificates: ... 
+def serialize_java_truststore( + certs: Iterable[PKCS12Certificate], + encryption_algorithm: KeySerializationEncryption, +) -> bytes: ... +def serialize_key_and_certificates( + name: bytes | None, + key: PKCS12PrivateKeyTypes | None, + cert: x509.Certificate | None, + cas: Iterable[x509.Certificate | PKCS12Certificate] | None, + encryption_algorithm: KeySerializationEncryption, +) -> bytes: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi new file mode 100644 index 0000000..358b135 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi @@ -0,0 +1,50 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives.serialization import pkcs7 + +def serialize_certificates( + certs: list[x509.Certificate], + encoding: serialization.Encoding, +) -> bytes: ... +def encrypt_and_serialize( + builder: pkcs7.PKCS7EnvelopeBuilder, + content_encryption_algorithm: pkcs7.ContentEncryptionAlgorithm, + encoding: serialization.Encoding, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def sign_and_serialize( + builder: pkcs7.PKCS7SignatureBuilder, + encoding: serialization.Encoding, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_der( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_pem( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_smime( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def load_pem_pkcs7_certificates( + data: bytes, +) -> list[x509.Certificate]: ... +def load_der_pkcs7_certificates( + data: bytes, +) -> list[x509.Certificate]: ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi new file mode 100644 index 0000000..c6c6d0b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.serialization import pkcs7 +from cryptography.utils import Buffer + +class TestCertificate: + not_after_tag: int + not_before_tag: int + issuer_value_tags: list[int] + subject_value_tags: list[int] + +def test_parse_certificate(data: bytes) -> TestCertificate: ... +def pkcs7_verify( + encoding: serialization.Encoding, + sig: bytes, + msg: Buffer | None, + certs: list[x509.Certificate], + options: list[pkcs7.PKCS7Options], +) -> None: ... 
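
For orientation, a sketch of the PKCS#12 round trip exposed by the public serialization.pkcs12 module on top of the bindings above. key and cert are placeholders for an already-loaded private key and matching certificate:

from cryptography.hazmat.primitives.serialization import BestAvailableEncryption, pkcs12

p12_bytes = pkcs12.serialize_key_and_certificates(
    name=b"example",
    key=key,    # placeholder: a previously loaded private key
    cert=cert,  # placeholder: the matching x509.Certificate
    cas=None,
    encryption_algorithm=BestAvailableEncryption(b"passphrase"),
)
loaded_key, loaded_cert, additional = pkcs12.load_key_and_certificates(
    p12_bytes, b"passphrase"
)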
diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi new file mode 100644 index 0000000..83c3441 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi @@ -0,0 +1,301 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import datetime +import typing +from collections.abc import Iterator + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric.ec import ECDSA +from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15 +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPublicKeyTypes, + CertificatePublicKeyTypes, + PrivateKeyTypes, +) +from cryptography.x509 import certificate_transparency + +def load_pem_x509_certificate( + data: bytes, backend: typing.Any = None +) -> x509.Certificate: ... +def load_der_x509_certificate( + data: bytes, backend: typing.Any = None +) -> x509.Certificate: ... +def load_pem_x509_certificates( + data: bytes, +) -> list[x509.Certificate]: ... +def load_pem_x509_crl( + data: bytes, backend: typing.Any = None +) -> x509.CertificateRevocationList: ... +def load_der_x509_crl( + data: bytes, backend: typing.Any = None +) -> x509.CertificateRevocationList: ... +def load_pem_x509_csr( + data: bytes, backend: typing.Any = None +) -> x509.CertificateSigningRequest: ... +def load_der_x509_csr( + data: bytes, backend: typing.Any = None +) -> x509.CertificateSigningRequest: ... +def encode_name_bytes(name: x509.Name) -> bytes: ... +def encode_extension_value(extension: x509.ExtensionType) -> bytes: ... +def create_x509_certificate( + builder: x509.CertificateBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.Certificate: ... +def create_x509_csr( + builder: x509.CertificateSigningRequestBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.CertificateSigningRequest: ... +def create_x509_crl( + builder: x509.CertificateRevocationListBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.CertificateRevocationList: ... + +class Sct: + @property + def version(self) -> certificate_transparency.Version: ... + @property + def log_id(self) -> bytes: ... + @property + def timestamp(self) -> datetime.datetime: ... + @property + def entry_type(self) -> certificate_transparency.LogEntryType: ... + @property + def signature_hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def signature_algorithm( + self, + ) -> certificate_transparency.SignatureAlgorithm: ... + @property + def signature(self) -> bytes: ... + @property + def extension_bytes(self) -> bytes: ... + +class Certificate: + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... + @property + def serial_number(self) -> int: ... + @property + def version(self) -> x509.Version: ... + def public_key(self) -> CertificatePublicKeyTypes: ... + @property + def public_key_algorithm_oid(self) -> x509.ObjectIdentifier: ... 
+ @property + def not_valid_before(self) -> datetime.datetime: ... + @property + def not_valid_before_utc(self) -> datetime.datetime: ... + @property + def not_valid_after(self) -> datetime.datetime: ... + @property + def not_valid_after_utc(self) -> datetime.datetime: ... + @property + def issuer(self) -> x509.Name: ... + @property + def subject(self) -> x509.Name: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certificate_bytes(self) -> bytes: ... + @property + def tbs_precertificate_bytes(self) -> bytes: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + def verify_directly_issued_by(self, issuer: Certificate) -> None: ... + +class RevokedCertificate: ... + +class CertificateRevocationList: + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... + def get_revoked_certificate_by_serial_number( + self, serial_number: int + ) -> x509.RevokedCertificate | None: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def issuer(self) -> x509.Name: ... + @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def last_update(self) -> datetime.datetime: ... + @property + def last_update_utc(self) -> datetime.datetime: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certlist_bytes(self) -> bytes: ... + def __eq__(self, other: object) -> bool: ... + def __len__(self) -> int: ... + @typing.overload + def __getitem__(self, idx: int) -> x509.RevokedCertificate: ... + @typing.overload + def __getitem__(self, idx: slice) -> list[x509.RevokedCertificate]: ... + def __iter__(self) -> Iterator[x509.RevokedCertificate]: ... + def is_signature_valid( + self, public_key: CertificateIssuerPublicKeyTypes + ) -> bool: ... + +class CertificateSigningRequest: + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def public_key(self) -> CertificatePublicKeyTypes: ... + @property + def subject(self) -> x509.Name: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def attributes(self) -> x509.Attributes: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certrequest_bytes(self) -> bytes: ... + @property + def is_signature_valid(self) -> bool: ... + +class PolicyBuilder: + def time(self, time: datetime.datetime) -> PolicyBuilder: ... 
+ def store(self, store: Store) -> PolicyBuilder: ... + def max_chain_depth(self, max_chain_depth: int) -> PolicyBuilder: ... + def extension_policies( + self, *, ca_policy: ExtensionPolicy, ee_policy: ExtensionPolicy + ) -> PolicyBuilder: ... + def build_client_verifier(self) -> ClientVerifier: ... + def build_server_verifier( + self, subject: x509.verification.Subject + ) -> ServerVerifier: ... + +class Policy: + @property + def max_chain_depth(self) -> int: ... + @property + def subject(self) -> x509.verification.Subject | None: ... + @property + def validation_time(self) -> datetime.datetime: ... + @property + def extended_key_usage(self) -> x509.ObjectIdentifier: ... + @property + def minimum_rsa_modulus(self) -> int: ... + +class Criticality: + CRITICAL: Criticality + AGNOSTIC: Criticality + NON_CRITICAL: Criticality + +T = typing.TypeVar("T", contravariant=True, bound=x509.ExtensionType) + +MaybeExtensionValidatorCallback = typing.Callable[ + [ + Policy, + x509.Certificate, + T | None, + ], + None, +] + +PresentExtensionValidatorCallback = typing.Callable[ + [Policy, x509.Certificate, T], + None, +] + +class ExtensionPolicy: + @staticmethod + def permit_all() -> ExtensionPolicy: ... + @staticmethod + def webpki_defaults_ca() -> ExtensionPolicy: ... + @staticmethod + def webpki_defaults_ee() -> ExtensionPolicy: ... + def require_not_present( + self, extension_type: type[x509.ExtensionType] + ) -> ExtensionPolicy: ... + def may_be_present( + self, + extension_type: type[T], + criticality: Criticality, + validator: MaybeExtensionValidatorCallback[T] | None, + ) -> ExtensionPolicy: ... + def require_present( + self, + extension_type: type[T], + criticality: Criticality, + validator: PresentExtensionValidatorCallback[T] | None, + ) -> ExtensionPolicy: ... + +class VerifiedClient: + @property + def subjects(self) -> list[x509.GeneralName] | None: ... + @property + def chain(self) -> list[x509.Certificate]: ... + +class ClientVerifier: + @property + def policy(self) -> Policy: ... + @property + def store(self) -> Store: ... + def verify( + self, + leaf: x509.Certificate, + intermediates: list[x509.Certificate], + ) -> VerifiedClient: ... + +class ServerVerifier: + @property + def policy(self) -> Policy: ... + @property + def store(self) -> Store: ... + def verify( + self, + leaf: x509.Certificate, + intermediates: list[x509.Certificate], + ) -> list[x509.Certificate]: ... + +class Store: + def __init__(self, certs: list[x509.Certificate]) -> None: ... + +class VerificationError(Exception): ... diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
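
The PolicyBuilder / Store / verifier stubs above are surfaced as cryptography.x509.verification in recent releases. A minimal server-verification sketch, with root_pem, leaf, and intermediates as placeholders for material loaded elsewhere:

from cryptography.x509 import DNSName, load_pem_x509_certificates
from cryptography.x509.verification import PolicyBuilder, Store

store = Store(load_pem_x509_certificates(root_pem))  # root_pem: trusted roots (placeholder)
verifier = PolicyBuilder().store(store).build_server_verifier(DNSName("example.com"))
chain = verifier.verify(leaf, intermediates)  # raises VerificationError if untrusted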
diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..f48f957 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-39.pyc new file mode 100644 index 0000000..ef7260f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-39.pyc new file mode 100644 index 0000000..1d3f71b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 0000000..063bcf5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,207 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + + +def cryptography_has_set_cert_cb() -> list[str]: + return [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ] + + +def cryptography_has_ssl_st() -> list[str]: + return [ + "SSL_ST_BEFORE", + "SSL_ST_OK", + "SSL_ST_INIT", + "SSL_ST_RENEGOTIATE", + ] + + +def cryptography_has_tls_st() -> list[str]: + return [ + "TLS_ST_BEFORE", + "TLS_ST_OK", + ] + + +def cryptography_has_ssl_sigalgs() -> list[str]: + return [ + "SSL_CTX_set1_sigalgs_list", + ] + + +def cryptography_has_psk() -> list[str]: + return [ + "SSL_CTX_use_psk_identity_hint", + "SSL_CTX_set_psk_server_callback", + "SSL_CTX_set_psk_client_callback", + ] + + +def cryptography_has_psk_tlsv13() -> list[str]: + return [ + "SSL_CTX_set_psk_find_session_callback", + "SSL_CTX_set_psk_use_session_callback", + "Cryptography_SSL_SESSION_new", + "SSL_CIPHER_find", + "SSL_SESSION_set1_master_key", + "SSL_SESSION_set_cipher", + "SSL_SESSION_set_protocol_version", + ] + + +def cryptography_has_custom_ext() -> list[str]: + return [ + "SSL_CTX_add_client_custom_ext", + "SSL_CTX_add_server_custom_ext", + "SSL_extension_supported", + ] + + +def cryptography_has_tlsv13_functions() -> list[str]: + return [ + "SSL_CTX_set_ciphersuites", + ] + + +def cryptography_has_tlsv13_hs_functions() -> list[str]: + return [ + "SSL_VERIFY_POST_HANDSHAKE", + "SSL_verify_client_post_handshake", + "SSL_CTX_set_post_handshake_auth", + "SSL_set_post_handshake_auth", + "SSL_SESSION_get_max_early_data", + "SSL_write_early_data", + "SSL_read_early_data", + "SSL_CTX_set_max_early_data", + ] + + +def cryptography_has_ssl_verify_client_post_handshake() -> list[str]: + return [ + "SSL_verify_client_post_handshake", + ] + + +def cryptography_has_engine() -> list[str]: + return [ + "ENGINE_by_id", + "ENGINE_init", + "ENGINE_finish", + "ENGINE_get_default_RAND", + "ENGINE_set_default_RAND", + "ENGINE_unregister_RAND", + "ENGINE_ctrl_cmd", + "ENGINE_free", + "ENGINE_get_name", + "ENGINE_ctrl_cmd_string", + "ENGINE_load_builtin_engines", + "ENGINE_load_private_key", + "ENGINE_load_public_key", + "SSL_CTX_set_client_cert_engine", + ] + + +def cryptography_has_verified_chain() -> list[str]: + return [ + "SSL_get0_verified_chain", + ] + + +def cryptography_has_srtp() -> list[str]: + return [ + "SSL_CTX_set_tlsext_use_srtp", + "SSL_set_tlsext_use_srtp", + "SSL_get_selected_srtp_profile", + ] + + +def cryptography_has_op_no_renegotiation() -> list[str]: + return [ + "SSL_OP_NO_RENEGOTIATION", + ] + + +def cryptography_has_dtls_get_data_mtu() -> list[str]: + return [ + "DTLS_get_data_mtu", + ] + + +def cryptography_has_ssl_cookie() -> list[str]: + return [ + "SSL_OP_COOKIE_EXCHANGE", + "DTLSv1_listen", + "SSL_CTX_set_cookie_generate_cb", + "SSL_CTX_set_cookie_verify_cb", + ] + + +def cryptography_has_prime_checks() -> list[str]: + return [ + "BN_prime_checks_for_size", + ] + + +def cryptography_has_unexpected_eof_while_reading() -> list[str]: + return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"] + + +def cryptography_has_ssl_op_ignore_unexpected_eof() -> list[str]: + return [ + "SSL_OP_IGNORE_UNEXPECTED_EOF", + ] + + +def cryptography_has_get_extms_support() -> list[str]: + return ["SSL_get_extms_support"] + + +def cryptography_has_ssl_get0_group_name() -> list[str]: + return ["SSL_get0_group_name"] + + +# This is a mapping of +# {condition: function-returning-names-dependent-on-that-condition} so we can +# loop over them and delete unsupported names at runtime. It will be removed +# when cffi supports #if in cdef. 
We use functions instead of just a dict of +# lists so we can use coverage to measure which are used. +CONDITIONAL_NAMES = { + "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, + "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, + "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, + "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs, + "Cryptography_HAS_PSK": cryptography_has_psk, + "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13, + "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, + "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions, + "Cryptography_HAS_TLSv1_3_HS_FUNCTIONS": ( + cryptography_has_tlsv13_hs_functions + ), + "Cryptography_HAS_SSL_VERIFY_CLIENT_POST_HANDSHAKE": ( + cryptography_has_ssl_verify_client_post_handshake + ), + "Cryptography_HAS_ENGINE": cryptography_has_engine, + "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, + "Cryptography_HAS_SRTP": cryptography_has_srtp, + "Cryptography_HAS_OP_NO_RENEGOTIATION": ( + cryptography_has_op_no_renegotiation + ), + "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu, + "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie, + "Cryptography_HAS_PRIME_CHECKS": cryptography_has_prime_checks, + "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": ( + cryptography_has_unexpected_eof_while_reading + ), + "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": ( + cryptography_has_ssl_op_ignore_unexpected_eof + ), + "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support, + "Cryptography_HAS_SSL_GET0_GROUP_NAME": ( + cryptography_has_ssl_get0_group_name + ), +} diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..4494c71 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,137 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import os +import sys +import threading +import types +import typing +import warnings +from collections.abc import Callable + +import cryptography +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._rust import _openssl, openssl +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES +from cryptography.utils import CryptographyDeprecationWarning + + +def _openssl_assert(ok: bool) -> None: + if not ok: + errors = openssl.capture_error_stack() + + raise InternalError( + "Unknown OpenSSL error. This error is commonly encountered when " + "another library is not cleaning up the OpenSSL error stack. If " + "you are using cryptography with another library that uses " + "OpenSSL try disabling it before reporting a bug. Otherwise " + "please file an issue at https://github.com/pyca/cryptography/" + "issues with information on how to reproduce " + f"this. 
({errors!r})", + errors, + ) + + +def build_conditional_library( + lib: typing.Any, + conditional_names: dict[str, Callable[[], list[str]]], +) -> typing.Any: + conditional_lib = types.ModuleType("lib") + conditional_lib._original_lib = lib # type: ignore[attr-defined] + excluded_names = set() + for condition, names_cb in conditional_names.items(): + if not getattr(lib, condition): + excluded_names.update(names_cb()) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding: + """ + OpenSSL API wrapper. + """ + + lib: typing.ClassVar[typing.Any] = None + ffi = _openssl.ffi + _lib_loaded = False + _init_lock = threading.Lock() + + def __init__(self) -> None: + self._ensure_ffi_initialized() + + @classmethod + def _ensure_ffi_initialized(cls) -> None: + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library( + _openssl.lib, CONDITIONAL_NAMES + ) + cls._lib_loaded = True + + @classmethod + def init_static_locks(cls) -> None: + cls._ensure_ffi_initialized() + + +def _verify_package_version(version: str) -> None: + # Occasionally we run into situations where the version of the Python + # package does not match the version of the shared object that is loaded. + # This may occur in environments where multiple versions of cryptography + # are installed and available in the python path. To avoid errors cropping + # up later this code checks that the currently imported package and the + # shared object that were loaded have the same version and raise an + # ImportError if they do not + so_package_version = _openssl.ffi.string( + _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION + ) + if version.encode("ascii") != so_package_version: + raise ImportError( + "The version of cryptography does not match the loaded " + "shared object. This can happen if you have multiple copies of " + "cryptography installed in your Python path. Please try creating " + "a new virtual environment to resolve this issue. " + f"Loaded python version: {version}, " + f"shared object version: {so_package_version}" + ) + + _openssl_assert( + _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(), + ) + + +_verify_package_version(cryptography.__version__) + +Binding.init_static_locks() + +if ( + sys.platform == "win32" + and os.environ.get("PROCESSOR_ARCHITEW6432") is not None +): + warnings.warn( + "You are using cryptography on a 32-bit Python on a 64-bit Windows " + "Operating System. Cryptography will be significantly faster if you " + "switch to using a 64-bit Python.", + UserWarning, + stacklevel=2, + ) + +if ( + not openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not openssl.CRYPTOGRAPHY_IS_AWSLC + and not openssl.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER +): + warnings.warn( + "You are using OpenSSL < 3.0. Support for OpenSSL < 3.0 is deprecated " + "and will be removed in the next release. Please upgrade to OpenSSL " + "3.0 or later.", + CryptographyDeprecationWarning, + stacklevel=2, + ) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/__init__.py new file mode 100644 index 0000000..41d7318 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..96287cf Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py new file mode 100644 index 0000000..41d7318 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..89d2e3b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-39.pyc new file mode 100644 index 0000000..4a7bac0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py new file mode 100644 index 0000000..072a991 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py @@ -0,0 +1,112 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, + _verify_key_size, +) + + +class ARC4(CipherAlgorithm): + name = "RC4" + key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class TripleDES(BlockCipherAlgorithm): + name = "3DES" + block_size = 64 + key_sizes = frozenset([64, 128, 192]) + + def __init__(self, key: bytes): + if len(key) == 8: + key += key + key + elif len(key) == 16: + key += key[:8] + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +# Not actually supported, marker for tests +class _DES: + key_size = 64 + + +class Blowfish(BlockCipherAlgorithm): + name = "Blowfish" + block_size = 64 + key_sizes = frozenset(range(32, 449, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class CAST5(BlockCipherAlgorithm): + name = "CAST5" + block_size = 64 + key_sizes = frozenset(range(40, 129, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SEED(BlockCipherAlgorithm): + name = "SEED" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class IDEA(BlockCipherAlgorithm): + name = "IDEA" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +# This class only allows RC2 with a 128-bit key. No support for +# effective key bits or other key sizes is provided. +class RC2(BlockCipherAlgorithm): + name = "RC2" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
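
The decrepit algorithms above remain usable with the normal Cipher interface for legacy interoperability only. A short sketch with ARC4 (a stream cipher, so mode is None); the all-zero key is purely illustrative, and such ciphers should not appear in new designs:

from cryptography.hazmat.decrepit.ciphers.algorithms import ARC4
from cryptography.hazmat.primitives.ciphers import Cipher

cipher = Cipher(ARC4(b"\x00" * 16), mode=None)  # 128-bit demo key; ARC4 takes no mode
encryptor = cipher.encryptor()
ct = encryptor.update(b"legacy data") + encryptor.finalize()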
diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..39c49b3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-39.pyc new file mode 100644 index 0000000..84c7855 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-39.pyc new file mode 100644 index 0000000..ba3a8f7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-39.pyc new file mode 100644 index 0000000..da278db Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-39.pyc new file mode 100644 index 0000000..ebe2146 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-39.pyc new file mode 100644 index 0000000..ca3182b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-39.pyc new file mode 100644 index 0000000..34bece9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-39.pyc new file mode 100644 index 0000000..8c6ef7d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-39.pyc new file mode 100644 index 0000000..3720859 Binary files /dev/null and 
b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-39.pyc new file mode 100644 index 0000000..64b9157 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-39.pyc new file mode 100644 index 0000000..aa19a2e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_asymmetric.py new file mode 100644 index 0000000..ea55ffd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_asymmetric.py @@ -0,0 +1,19 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +# This exists to break an import cycle. It is normally accessible from the +# asymmetric padding module. + + +class AsymmetricPadding(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this padding (e.g. "PSS", "PKCS1"). + """ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py new file mode 100644 index 0000000..305a9fd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py @@ -0,0 +1,60 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils + +# This exists to break an import cycle. It is normally accessible from the +# ciphers module. + + +class CipherAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this mode (e.g. "AES", "Camellia"). + """ + + @property + @abc.abstractmethod + def key_sizes(self) -> frozenset[int]: + """ + Valid key sizes for this algorithm in bits + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +class BlockCipherAlgorithm(CipherAlgorithm): + key: utils.Buffer + + @property + @abc.abstractmethod + def block_size(self) -> int: + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ + + +def _verify_key_size( + algorithm: CipherAlgorithm, key: utils.Buffer +) -> utils.Buffer: + # Verify that the key is instance of bytes + utils._check_byteslike("key", key) + + # Verify that the key size matches the expected key size + if len(key) * 8 not in algorithm.key_sizes: + raise ValueError( + f"Invalid key size ({len(key) * 8}) for {algorithm.name}." 
+ ) + return key diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_serialization.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_serialization.py new file mode 100644 index 0000000..e998865 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/_serialization.py @@ -0,0 +1,168 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.hazmat.primitives.hashes import HashAlgorithm + +# This exists to break an import cycle. These classes are normally accessible +# from the serialization module. + + +class PBES(utils.Enum): + PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES" + PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC" + + +class Encoding(utils.Enum): + PEM = "PEM" + DER = "DER" + OpenSSH = "OpenSSH" + Raw = "Raw" + X962 = "ANSI X9.62" + SMIME = "S/MIME" + + +class PrivateFormat(utils.Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + Raw = "Raw" + OpenSSH = "OpenSSH" + PKCS12 = "PKCS12" + + def encryption_builder(self) -> KeySerializationEncryptionBuilder: + if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12): + raise ValueError( + "encryption_builder only supported with PrivateFormat.OpenSSH" + " and PrivateFormat.PKCS12" + ) + return KeySerializationEncryptionBuilder(self) + + +class PublicFormat(utils.Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + OpenSSH = "OpenSSH" + Raw = "Raw" + CompressedPoint = "X9.62 Compressed Point" + UncompressedPoint = "X9.62 Uncompressed Point" + + +class ParameterFormat(utils.Enum): + PKCS3 = "PKCS3" + + +class KeySerializationEncryption(metaclass=abc.ABCMeta): + pass + + +class BestAvailableEncryption(KeySerializationEncryption): + def __init__(self, password: bytes): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +class NoEncryption(KeySerializationEncryption): + pass + + +class KeySerializationEncryptionBuilder: + def __init__( + self, + format: PrivateFormat, + *, + _kdf_rounds: int | None = None, + _hmac_hash: HashAlgorithm | None = None, + _key_cert_algorithm: PBES | None = None, + ) -> None: + self._format = format + + self._kdf_rounds = _kdf_rounds + self._hmac_hash = _hmac_hash + self._key_cert_algorithm = _key_cert_algorithm + + def kdf_rounds(self, rounds: int) -> KeySerializationEncryptionBuilder: + if self._kdf_rounds is not None: + raise ValueError("kdf_rounds already set") + + if not isinstance(rounds, int): + raise TypeError("kdf_rounds must be an integer") + + if rounds < 1: + raise ValueError("kdf_rounds must be a positive integer") + + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=rounds, + _hmac_hash=self._hmac_hash, + _key_cert_algorithm=self._key_cert_algorithm, + ) + + def hmac_hash( + self, algorithm: HashAlgorithm + ) -> KeySerializationEncryptionBuilder: + if self._format is not PrivateFormat.PKCS12: + raise TypeError( + "hmac_hash only supported with PrivateFormat.PKCS12" + ) + + if self._hmac_hash is not None: + raise ValueError("hmac_hash already set") + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=self._kdf_rounds, + _hmac_hash=algorithm, + 
_key_cert_algorithm=self._key_cert_algorithm, + ) + + def key_cert_algorithm( + self, algorithm: PBES + ) -> KeySerializationEncryptionBuilder: + if self._format is not PrivateFormat.PKCS12: + raise TypeError( + "key_cert_algorithm only supported with PrivateFormat.PKCS12" + ) + if self._key_cert_algorithm is not None: + raise ValueError("key_cert_algorithm already set") + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=self._kdf_rounds, + _hmac_hash=self._hmac_hash, + _key_cert_algorithm=algorithm, + ) + + def build(self, password: bytes) -> KeySerializationEncryption: + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + return _KeySerializationEncryption( + self._format, + password, + kdf_rounds=self._kdf_rounds, + hmac_hash=self._hmac_hash, + key_cert_algorithm=self._key_cert_algorithm, + ) + + +class _KeySerializationEncryption(KeySerializationEncryption): + def __init__( + self, + format: PrivateFormat, + password: bytes, + *, + kdf_rounds: int | None, + hmac_hash: HashAlgorithm | None, + key_cert_algorithm: PBES | None, + ): + self._format = format + self.password = password + + self._kdf_rounds = kdf_rounds + self._hmac_hash = hmac_hash + self._key_cert_algorithm = key_cert_algorithm diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
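A short sketch of how the immutable builder added in the _serialization.py hunk above is typically driven. The serialization and pkcs12 modules used below are the usual public re-exports of these names, not something defined in this hunk, so treat the exact import locations as an assumption.

from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import pkcs12

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

# Each setter returns a new builder; PKCS12 additionally allows tuning the
# PRF hash and the key/cert encryption scheme, as the hunk above enforces.
encryption = (
    serialization.PrivateFormat.PKCS12.encryption_builder()
    .kdf_rounds(50000)
    .key_cert_algorithm(pkcs12.PBES.PBESv2SHA256AndAES256CBC)
    .hmac_hash(hashes.SHA256())
    .build(b"correct horse battery staple")
)

# The result is accepted anywhere a KeySerializationEncryption is expected,
# for example when writing a PKCS#12 blob containing just the key.
p12 = pkcs12.serialize_key_and_certificates(
    name=b"example", key=key, cert=None, cas=None,
    encryption_algorithm=encryption,
)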
diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..cdc2617 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-39.pyc new file mode 100644 index 0000000..c6e594b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-39.pyc new file mode 100644 index 0000000..5c718f7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-39.pyc new file mode 100644 index 0000000..1abd52b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-39.pyc new file mode 100644 index 0000000..af19eee Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-39.pyc new file mode 100644 index 0000000..5527f97 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-39.pyc new file mode 100644 index 0000000..7258eac Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-39.pyc new file mode 100644 index 0000000..5145250 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-39.pyc new file mode 100644 index 0000000..1aa5d89 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..92dba83 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-39.pyc new file mode 100644 index 0000000..2382da8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-39.pyc new file mode 100644 index 0000000..3f418cb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 0000000..1822e99 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,147 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization + +generate_parameters = rust_openssl.dh.generate_parameters + + +DHPrivateNumbers = rust_openssl.dh.DHPrivateNumbers +DHPublicNumbers = rust_openssl.dh.DHPublicNumbers +DHParameterNumbers = rust_openssl.dh.DHParameterNumbers + + +class DHParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> DHPrivateKey: + """ + Generates and returns a DHPrivateKey. + """ + + @abc.abstractmethod + def parameter_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.ParameterFormat, + ) -> bytes: + """ + Returns the parameters serialized as bytes. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DHParameterNumbers: + """ + Returns a DHParameterNumbers. + """ + + +DHParametersWithSerialization = DHParameters +DHParameters.register(rust_openssl.dh.DHParameters) + + +class DHPublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DHPublicNumbers: + """ + Returns a DHPublicNumbers. 
+ """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> DHPublicKey: + """ + Returns a copy. + """ + + +DHPublicKeyWithSerialization = DHPublicKey +DHPublicKey.register(rust_openssl.dh.DHPublicKey) + + +class DHPrivateKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DHPublicKey: + """ + The DHPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: DHPublicKey) -> bytes: + """ + Given peer's DHPublicKey, carry out the key exchange and + return shared key as bytes. + """ + + @abc.abstractmethod + def private_numbers(self) -> DHPrivateNumbers: + """ + Returns a DHPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> DHPrivateKey: + """ + Returns a copy. + """ + + +DHPrivateKeyWithSerialization = DHPrivateKey +DHPrivateKey.register(rust_openssl.dh.DHPrivateKey) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..21d78ba --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,167 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.utils import Buffer + + +class DSAParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> DSAPrivateKey: + """ + Generates and returns a DSAPrivateKey. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DSAParameterNumbers: + """ + Returns a DSAParameterNumbers. + """ + + +DSAParametersWithNumbers = DSAParameters +DSAParameters.register(rust_openssl.dsa.DSAParameters) + + +class DSAPrivateKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DSAPublicKey: + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this private key. 
+ """ + + @abc.abstractmethod + def sign( + self, + data: Buffer, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> DSAPrivateNumbers: + """ + Returns a DSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> DSAPrivateKey: + """ + Returns a copy. + """ + + +DSAPrivateKeyWithSerialization = DSAPrivateKey +DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey) + + +class DSAPublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DSAPublicNumbers: + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: Buffer, + data: Buffer, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> DSAPublicKey: + """ + Returns a copy. + """ + + +DSAPublicKeyWithSerialization = DSAPublicKey +DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey) + +DSAPrivateNumbers = rust_openssl.dsa.DSAPrivateNumbers +DSAPublicNumbers = rust_openssl.dsa.DSAPublicNumbers +DSAParameterNumbers = rust_openssl.dsa.DSAParameterNumbers + + +def generate_parameters( + key_size: int, backend: typing.Any = None +) -> DSAParameters: + if key_size not in (1024, 2048, 3072, 4096): + raise ValueError("Key size must be 1024, 2048, 3072, or 4096 bits.") + + return rust_openssl.dsa.generate_parameters(key_size) + + +def generate_private_key( + key_size: int, backend: typing.Any = None +) -> DSAPrivateKey: + parameters = generate_parameters(key_size) + return parameters.generate_private_key() diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 0000000..8638d20 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,470 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import abc +import typing + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat._oid import ObjectIdentifier +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class EllipticCurveOID: + SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") + SECP224R1 = ObjectIdentifier("1.3.132.0.33") + SECP256K1 = ObjectIdentifier("1.3.132.0.10") + SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") + SECP384R1 = ObjectIdentifier("1.3.132.0.34") + SECP521R1 = ObjectIdentifier("1.3.132.0.35") + BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") + BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") + BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") + SECT163K1 = ObjectIdentifier("1.3.132.0.1") + SECT163R2 = ObjectIdentifier("1.3.132.0.15") + SECT233K1 = ObjectIdentifier("1.3.132.0.26") + SECT233R1 = ObjectIdentifier("1.3.132.0.27") + SECT283K1 = ObjectIdentifier("1.3.132.0.16") + SECT283R1 = ObjectIdentifier("1.3.132.0.17") + SECT409K1 = ObjectIdentifier("1.3.132.0.36") + SECT409R1 = ObjectIdentifier("1.3.132.0.37") + SECT571K1 = ObjectIdentifier("1.3.132.0.38") + SECT571R1 = ObjectIdentifier("1.3.132.0.39") + + +class EllipticCurve(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + The name of the curve. e.g. secp256r1. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @property + @abc.abstractmethod + def group_order(self) -> int: + """ + The order of the curve's group. + """ + + +class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def algorithm( + self, + ) -> asym_utils.Prehashed | hashes.HashAlgorithm: + """ + The digest algorithm used with this signature. + """ + + +class EllipticCurvePrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def exchange( + self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey + ) -> bytes: + """ + Performs a key exchange operation using the provided algorithm with the + provided peer's public key. + """ + + @abc.abstractmethod + def public_key(self) -> EllipticCurvePublicKey: + """ + The EllipticCurvePublicKey for this private key. + """ + + @property + @abc.abstractmethod + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def sign( + self, + data: utils.Buffer, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> EllipticCurvePrivateNumbers: + """ + Returns an EllipticCurvePrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> EllipticCurvePrivateKey: + """ + Returns a copy. 
+ """ + + +EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey +EllipticCurvePrivateKey.register(rust_openssl.ec.ECPrivateKey) + + +class EllipticCurvePublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def public_numbers(self) -> EllipticCurvePublicNumbers: + """ + Returns an EllipticCurvePublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: utils.Buffer, + data: utils.Buffer, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @classmethod + def from_encoded_point( + cls, curve: EllipticCurve, data: bytes + ) -> EllipticCurvePublicKey: + utils._check_bytes("data", data) + + if len(data) == 0: + raise ValueError("data must not be an empty byte string") + + if data[0] not in [0x02, 0x03, 0x04]: + raise ValueError("Unsupported elliptic curve point type") + + return rust_openssl.ec.from_public_bytes(curve, data) + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> EllipticCurvePublicKey: + """ + Returns a copy. + """ + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey +EllipticCurvePublicKey.register(rust_openssl.ec.ECPublicKey) + +EllipticCurvePrivateNumbers = rust_openssl.ec.EllipticCurvePrivateNumbers +EllipticCurvePublicNumbers = rust_openssl.ec.EllipticCurvePublicNumbers + + +class SECT571R1(EllipticCurve): + name = "sect571r1" + key_size = 570 + group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE661CE18FF55987308059B186823851EC7DD9CA1161DE93D5174D66E8382E9BB2FE84E47 # noqa: E501 + + +class SECT409R1(EllipticCurve): + name = "sect409r1" + key_size = 409 + group_order = 0x10000000000000000000000000000000000000000000000000001E2AAD6A612F33307BE5FA47C3C9E052F838164CD37D9A21173 # noqa: E501 + + +class SECT283R1(EllipticCurve): + name = "sect283r1" + key_size = 283 + group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEF90399660FC938A90165B042A7CEFADB307 # noqa: E501 + + +class SECT233R1(EllipticCurve): + name = "sect233r1" + key_size = 233 + group_order = 0x1000000000000000000000000000013E974E72F8A6922031D2603CFE0D7 + + +class SECT163R2(EllipticCurve): + name = "sect163r2" + key_size = 163 + group_order = 0x40000000000000000000292FE77E70C12A4234C33 + + +class SECT571K1(EllipticCurve): + name = "sect571k1" + key_size = 571 + group_order = 0x20000000000000000000000000000000000000000000000000000000000000000000000131850E1F19A63E4B391A8DB917F4138B630D84BE5D639381E91DEB45CFE778F637C1001 # noqa: E501 + + +class SECT409K1(EllipticCurve): + name = "sect409k1" + key_size = 409 + group_order = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE5F83B2D4EA20400EC4557D5ED3E3E7CA5B4B5C83B8E01E5FCF # noqa: E501 + + +class SECT283K1(EllipticCurve): + name = "sect283k1" + key_size = 283 + group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE9AE2ED07577265DFF7F94451E061E163C61 # noqa: E501 + + +class SECT233K1(EllipticCurve): + name = "sect233k1" + key_size = 233 + group_order = 
0x8000000000000000000000000000069D5BB915BCD46EFB1AD5F173ABDF + + +class SECT163K1(EllipticCurve): + name = "sect163k1" + key_size = 163 + group_order = 0x4000000000000000000020108A2E0CC0D99F8A5EF + + +class SECP521R1(EllipticCurve): + name = "secp521r1" + key_size = 521 + group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409 # noqa: E501 + + +class SECP384R1(EllipticCurve): + name = "secp384r1" + key_size = 384 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973 # noqa: E501 + + +class SECP256R1(EllipticCurve): + name = "secp256r1" + key_size = 256 + group_order = ( + 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551 + ) + + +class SECP256K1(EllipticCurve): + name = "secp256k1" + key_size = 256 + group_order = ( + 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 + ) + + +class SECP224R1(EllipticCurve): + name = "secp224r1" + key_size = 224 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D + + +class SECP192R1(EllipticCurve): + name = "secp192r1" + key_size = 192 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831 + + +class BrainpoolP256R1(EllipticCurve): + name = "brainpoolP256r1" + key_size = 256 + group_order = ( + 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 + ) + + +class BrainpoolP384R1(EllipticCurve): + name = "brainpoolP384r1" + key_size = 384 + group_order = 0x8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425A7CF3AB6AF6B7FC3103B883202E9046565 # noqa: E501 + + +class BrainpoolP512R1(EllipticCurve): + name = "brainpoolP512r1" + key_size = 512 + group_order = 0xAADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330870553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069 # noqa: E501 + + +_CURVE_TYPES: dict[str, EllipticCurve] = { + "prime192v1": SECP192R1(), + "prime256v1": SECP256R1(), + "secp192r1": SECP192R1(), + "secp224r1": SECP224R1(), + "secp256r1": SECP256R1(), + "secp384r1": SECP384R1(), + "secp521r1": SECP521R1(), + "secp256k1": SECP256K1(), + "sect163k1": SECT163K1(), + "sect233k1": SECT233K1(), + "sect283k1": SECT283K1(), + "sect409k1": SECT409K1(), + "sect571k1": SECT571K1(), + "sect163r2": SECT163R2(), + "sect233r1": SECT233R1(), + "sect283r1": SECT283R1(), + "sect409r1": SECT409R1(), + "sect571r1": SECT571R1(), + "brainpoolP256r1": BrainpoolP256R1(), + "brainpoolP384r1": BrainpoolP384R1(), + "brainpoolP512r1": BrainpoolP512R1(), +} + + +class ECDSA(EllipticCurveSignatureAlgorithm): + def __init__( + self, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + deterministic_signing: bool = False, + ): + from cryptography.hazmat.backends.openssl.backend import backend + + if ( + deterministic_signing + and not backend.ecdsa_deterministic_supported() + ): + raise UnsupportedAlgorithm( + "ECDSA with deterministic signature (RFC 6979) is not " + "supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + self._algorithm = algorithm + self._deterministic_signing = deterministic_signing + + @property + def algorithm( + self, + ) -> asym_utils.Prehashed | hashes.HashAlgorithm: + return self._algorithm + + @property + def deterministic_signing( + self, + ) -> bool: + return self._deterministic_signing + + +generate_private_key = rust_openssl.ec.generate_private_key + + +def derive_private_key( + private_value: int, + curve: EllipticCurve, + backend: typing.Any = 
None, +) -> EllipticCurvePrivateKey: + if not isinstance(private_value, int): + raise TypeError("private_value must be an integer type.") + + if private_value <= 0: + raise ValueError("private_value must be a positive integer.") + + return rust_openssl.ec.derive_private_key(private_value, curve) + + +class ECDH: + pass + + +_OID_TO_CURVE = { + EllipticCurveOID.SECP192R1: SECP192R1, + EllipticCurveOID.SECP224R1: SECP224R1, + EllipticCurveOID.SECP256K1: SECP256K1, + EllipticCurveOID.SECP256R1: SECP256R1, + EllipticCurveOID.SECP384R1: SECP384R1, + EllipticCurveOID.SECP521R1: SECP521R1, + EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, + EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, + EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, + EllipticCurveOID.SECT163K1: SECT163K1, + EllipticCurveOID.SECT163R2: SECT163R2, + EllipticCurveOID.SECT233K1: SECT233K1, + EllipticCurveOID.SECT233R1: SECT233R1, + EllipticCurveOID.SECT283K1: SECT283K1, + EllipticCurveOID.SECT283R1: SECT283R1, + EllipticCurveOID.SECT409K1: SECT409K1, + EllipticCurveOID.SECT409R1: SECT409R1, + EllipticCurveOID.SECT571K1: SECT571K1, + EllipticCurveOID.SECT571R1: SECT571R1, +} + + +def get_curve_for_oid(oid: ObjectIdentifier) -> type[EllipticCurve]: + try: + return _OID_TO_CURVE[oid] + except KeyError: + raise LookupError( + "The provided object identifier has no matching elliptic " + "curve class" + ) + + +_SECT_CURVES: tuple[type[EllipticCurve], ...] = ( + SECT163K1, + SECT163R2, + SECT233K1, + SECT233R1, + SECT283K1, + SECT283R1, + SECT409K1, + SECT409R1, + SECT571K1, + SECT571R1, +) + +for _curve_cls in _SECT_CURVES: + utils.deprecated( + _curve_cls, + __name__, + f"{_curve_cls.__name__} will be removed in the next release.", + utils.DeprecatedIn46, + name=_curve_cls.__name__, + ) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py new file mode 100644 index 0000000..e576dc9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py @@ -0,0 +1,129 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class Ed25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def verify(self, signature: Buffer, data: Buffer) -> None: + """ + Verify the signature. 
+ """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> Ed25519PublicKey: + """ + Returns a copy. + """ + + +Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey) + + +class Ed25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> Ed25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> Ed25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed25519PublicKey: + """ + The Ed25519PublicKey derived from the private key. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def sign(self, data: Buffer) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def __copy__(self) -> Ed25519PrivateKey: + """ + Returns a copy. + """ + + +Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py new file mode 100644 index 0000000..89db209 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py @@ -0,0 +1,131 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class Ed448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> Ed448PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). 
+ """ + + @abc.abstractmethod + def verify(self, signature: Buffer, data: Buffer) -> None: + """ + Verify the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> Ed448PublicKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "ed448"): + Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey) + + +class Ed448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> Ed448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> Ed448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed448PublicKey: + """ + The Ed448PublicKey derived from the private key. + """ + + @abc.abstractmethod + def sign(self, data: Buffer) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def __copy__(self) -> Ed448PrivateKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "x448"): + Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 0000000..5121a28 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,111 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import ( + AsymmetricPadding as AsymmetricPadding, +) +from cryptography.hazmat.primitives.asymmetric import rsa + + +class PKCS1v15(AsymmetricPadding): + name = "EMSA-PKCS1-v1_5" + + +class _MaxLength: + "Sentinel value for `MAX_LENGTH`." + + +class _Auto: + "Sentinel value for `AUTO`." + + +class _DigestLength: + "Sentinel value for `DIGEST_LENGTH`." 
+ + +class PSS(AsymmetricPadding): + MAX_LENGTH = _MaxLength() + AUTO = _Auto() + DIGEST_LENGTH = _DigestLength() + name = "EMSA-PSS" + _salt_length: int | _MaxLength | _Auto | _DigestLength + + def __init__( + self, + mgf: MGF, + salt_length: int | _MaxLength | _Auto | _DigestLength, + ) -> None: + self._mgf = mgf + + if not isinstance( + salt_length, (int, _MaxLength, _Auto, _DigestLength) + ): + raise TypeError( + "salt_length must be an integer, MAX_LENGTH, " + "DIGEST_LENGTH, or AUTO" + ) + + if isinstance(salt_length, int) and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + @property + def mgf(self) -> MGF: + return self._mgf + + +class OAEP(AsymmetricPadding): + name = "EME-OAEP" + + def __init__( + self, + mgf: MGF, + algorithm: hashes.HashAlgorithm, + label: bytes | None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + @property + def algorithm(self) -> hashes.HashAlgorithm: + return self._algorithm + + @property + def mgf(self) -> MGF: + return self._mgf + + +class MGF(metaclass=abc.ABCMeta): + _algorithm: hashes.HashAlgorithm + + +class MGF1(MGF): + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm + + +def calculate_max_pss_salt_length( + key: rsa.RSAPrivateKey | rsa.RSAPublicKey, + hash_algorithm: hashes.HashAlgorithm, +) -> int: + if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + raise TypeError("key must be an RSA public or private key") + # bit length - 1 per RFC 3447 + emlen = (key.key_size + 6) // 8 + salt_length = emlen - hash_algorithm.digest_size - 2 + assert salt_length >= 0 + return salt_length diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..f94812e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,285 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import random +import typing +from math import gcd + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class RSAPrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Decrypts the provided ciphertext. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self) -> RSAPublicKey: + """ + The RSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + padding: AsymmetricPadding, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> bytes: + """ + Signs the data. 
+ """ + + @abc.abstractmethod + def private_numbers(self) -> RSAPrivateNumbers: + """ + Returns an RSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> RSAPrivateKey: + """ + Returns a copy. + """ + + +RSAPrivateKeyWithSerialization = RSAPrivateKey +RSAPrivateKey.register(rust_openssl.rsa.RSAPrivateKey) + + +class RSAPublicKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Encrypts the given plaintext. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self) -> RSAPublicNumbers: + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + padding: AsymmetricPadding, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def recover_data_from_signature( + self, + signature: bytes, + padding: AsymmetricPadding, + algorithm: hashes.HashAlgorithm | None, + ) -> bytes: + """ + Recovers the original data from the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> RSAPublicKey: + """ + Returns a copy. + """ + + +RSAPublicKeyWithSerialization = RSAPublicKey +RSAPublicKey.register(rust_openssl.rsa.RSAPublicKey) + +RSAPrivateNumbers = rust_openssl.rsa.RSAPrivateNumbers +RSAPublicNumbers = rust_openssl.rsa.RSAPublicNumbers + + +def generate_private_key( + public_exponent: int, + key_size: int, + backend: typing.Any = None, +) -> RSAPrivateKey: + _verify_rsa_parameters(public_exponent, key_size) + return rust_openssl.rsa.generate_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None: + if public_exponent not in (3, 65537): + raise ValueError( + "public_exponent must be either 3 (for legacy compatibility) or " + "65537. Almost everyone should choose 65537 here!" + ) + + if key_size < 1024: + raise ValueError("key_size must be at least 1024-bits.") + + +def _modinv(e: int, m: int) -> int: + """ + Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 + """ + x1, x2 = 1, 0 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn = x1 - q * x2 + a, b, x1, x2 = b, r, x2, xn + return x1 % m + + +def rsa_crt_iqmp(p: int, q: int) -> int: + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + if p <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent: int, p: int) -> int: + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent (d) and p. 
+ """ + if private_exponent <= 1 or p <= 1: + raise ValueError("Values can't be <= 1") + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent: int, q: int) -> int: + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent (d) and q. + """ + if private_exponent <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + return private_exponent % (q - 1) + + +def rsa_recover_private_exponent(e: int, p: int, q: int) -> int: + """ + Compute the RSA private_exponent (d) given the public exponent (e) + and the RSA primes p and q. + + This uses the Carmichael totient function to generate the + smallest possible working value of the private exponent. + """ + # This lambda_n is the Carmichael totient function. + # The original RSA paper uses the Euler totient function + # here: phi_n = (p - 1) * (q - 1) + # Either version of the private exponent will work, but the + # one generated by the older formulation may be larger + # than necessary. (lambda_n always divides phi_n) + # + # TODO: Replace with lcm(p - 1, q - 1) once the minimum + # supported Python version is >= 3.9. + if e <= 1 or p <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + lambda_n = (p - 1) * (q - 1) // gcd(p - 1, q - 1) + return _modinv(e, lambda_n) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. +_MAX_RECOVERY_ATTEMPTS = 500 + + +def rsa_recover_prime_factors(n: int, e: int, d: int) -> tuple[int, int]: + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # reject invalid values early + if d <= 1 or e <= 1: + raise ValueError("d, e can't be <= 1") + if 17 != pow(17, e * d, n): + raise ValueError("n, d, e don't match") + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + tries = 0 + while not spotted and tries < _MAX_RECOVERY_ATTEMPTS: + a = random.randint(2, n - 1) + tries += 1 + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = gcd(cand + 1, n) + spotted = True + break + k *= 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + p, q = sorted((p, q), reverse=True) + return (p, q) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/types.py new file mode 100644 index 0000000..1fe4eaf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/types.py @@ -0,0 +1,111 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.hazmat.primitives.asymmetric import ( + dh, + dsa, + ec, + ed448, + ed25519, + rsa, + x448, + x25519, +) + +# Every asymmetric key type +PublicKeyTypes = typing.Union[ + dh.DHPublicKey, + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +PUBLIC_KEY_TYPES = PublicKeyTypes +utils.deprecated( + PUBLIC_KEY_TYPES, + __name__, + "Use PublicKeyTypes instead", + utils.DeprecatedIn40, + name="PUBLIC_KEY_TYPES", +) +# Every asymmetric key type +PrivateKeyTypes = typing.Union[ + dh.DHPrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + x25519.X25519PrivateKey, + x448.X448PrivateKey, +] +PRIVATE_KEY_TYPES = PrivateKeyTypes +utils.deprecated( + PRIVATE_KEY_TYPES, + __name__, + "Use PrivateKeyTypes instead", + utils.DeprecatedIn40, + name="PRIVATE_KEY_TYPES", +) +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate public key types +CertificateIssuerPrivateKeyTypes = typing.Union[ + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, +] +CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes +utils.deprecated( + CERTIFICATE_PRIVATE_KEY_TYPES, + __name__, + "Use CertificateIssuerPrivateKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_PRIVATE_KEY_TYPES", +) +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate private key types +CertificateIssuerPublicKeyTypes = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, +] +CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes +utils.deprecated( + CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, + __name__, + "Use CertificateIssuerPublicKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES", +) +# This type removes DHPublicKey. x448/x25519 can be a public key +# but cannot be used in signing so they are allowed here. +CertificatePublicKeyTypes = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes +utils.deprecated( + CERTIFICATE_PUBLIC_KEY_TYPES, + __name__, + "Use CertificatePublicKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_PUBLIC_KEY_TYPES", +) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 100644 index 0000000..826b956 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1,24 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
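The unions in the types.py hunk above exist for annotating downstream code; a small hedged sketch follows (the helper function name is illustrative, not part of the library).

from cryptography.hazmat.primitives.asymmetric import ec, ed25519, rsa
from cryptography.hazmat.primitives.asymmetric.types import PublicKeyTypes


def key_label(key: PublicKeyTypes) -> str:
    # PublicKeyTypes is a typing.Union over every supported public key class;
    # narrowing is done against the concrete ABCs, not the alias itself.
    if isinstance(key, rsa.RSAPublicKey):
        return f"RSA-{key.key_size}"
    if isinstance(key, ec.EllipticCurvePublicKey):
        return f"EC/{key.curve.name}"
    if isinstance(key, ed25519.Ed25519PublicKey):
        return "Ed25519"
    return type(key).__name__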
+ +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.primitives import hashes + +decode_dss_signature = asn1.decode_dss_signature +encode_dss_signature = asn1.encode_dss_signature + + +class Prehashed: + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of HashAlgorithm.") + + self._algorithm = algorithm + self._digest_size = algorithm.digest_size + + @property + def digest_size(self) -> int: + return self._digest_size diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py new file mode 100644 index 0000000..a499376 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py @@ -0,0 +1,122 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class X25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> X25519PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x25519.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> X25519PublicKey: + """ + Returns a copy. 
+ """ + + +X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey) + + +class X25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> X25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + return rust_openssl.x25519.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> X25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x25519.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X25519PublicKey: + """ + Returns the public key associated with this private key + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X25519PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ + + @abc.abstractmethod + def __copy__(self) -> X25519PrivateKey: + """ + Returns a copy. + """ + + +X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py new file mode 100644 index 0000000..c6fd71b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class X448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> X448PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> X448PublicKey: + """ + Returns a copy. 
+ """ + + +if hasattr(rust_openssl, "x448"): + X448PublicKey.register(rust_openssl.x448.X448PublicKey) + + +class X448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> X448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> X448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X448PublicKey: + """ + Returns the public key associated with this private key + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X448PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ + + @abc.abstractmethod + def __copy__(self) -> X448PrivateKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "x448"): + X448PrivateKey.register(rust_openssl.x448.X448PrivateKey) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 0000000..10c15d0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
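X448 mirrors the X25519 interface above; both register a Rust-backed implementation against the abstract class, guarded by an OpenSSL capability check. A minimal sketch of a key exchange with X25519 using only the methods declared here:

from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey

# Each side generates an ephemeral key pair.
alice = X25519PrivateKey.generate()
bob = X25519PrivateKey.generate()

# Exchanging public keys yields the same shared secret on both sides,
# which would normally be fed through a KDF before use.
assert alice.exchange(bob.public_key()) == bob.exchange(alice.public_key())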
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.base import ( + AEADCipherContext, + AEADDecryptionContext, + AEADEncryptionContext, + Cipher, + CipherContext, +) + +__all__ = [ + "AEADCipherContext", + "AEADDecryptionContext", + "AEADEncryptionContext", + "BlockCipherAlgorithm", + "Cipher", + "CipherAlgorithm", + "CipherContext", +] diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..ef4e6ef Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-39.pyc new file mode 100644 index 0000000..1446f1b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-39.pyc new file mode 100644 index 0000000..f32e25a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..cfa68fa Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-39.pyc new file mode 100644 index 0000000..716b541 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/aead.py new file mode 100644 index 0000000..c8a582d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/aead.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
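The aead module whose diff starts here only re-binds Rust-backed AEAD constructions; none of their method signatures appear in this diff, so the following AESGCM sketch assumes the upstream API (generate_key, encrypt, decrypt) and uses illustrative values:

import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aesgcm = AESGCM(key)
nonce = os.urandom(12)  # 96-bit nonce; must never repeat for a given key

ciphertext = aesgcm.encrypt(nonce, b"secret message", b"associated data")
assert aesgcm.decrypt(nonce, ciphertext, b"associated data") == b"secret message"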
+ +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = [ + "AESCCM", + "AESGCM", + "AESGCMSIV", + "AESOCB3", + "AESSIV", + "ChaCha20Poly1305", +] + +AESGCM = rust_openssl.aead.AESGCM +ChaCha20Poly1305 = rust_openssl.aead.ChaCha20Poly1305 +AESCCM = rust_openssl.aead.AESCCM +AESSIV = rust_openssl.aead.AESSIV +AESOCB3 = rust_openssl.aead.AESOCB3 +AESGCMSIV = rust_openssl.aead.AESGCMSIV diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 100644 index 0000000..1e402c7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1,136 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography import utils +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + ARC4 as ARC4, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + CAST5 as CAST5, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + IDEA as IDEA, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + SEED as SEED, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + Blowfish as Blowfish, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + TripleDES as TripleDES, +) +from cryptography.hazmat.primitives._cipheralgorithm import _verify_key_size +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) + + +class AES(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + # 512 added to support AES-256-XTS, which uses 512-bit keys + key_sizes = frozenset([128, 192, 256, 512]) + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class AES128(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + key_sizes = frozenset([128]) + key_size = 128 + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + +class AES256(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + key_sizes = frozenset([256]) + key_size = 256 + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + +class Camellia(BlockCipherAlgorithm): + name = "camellia" + block_size = 128 + key_sizes = frozenset([128, 192, 256]) + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +utils.deprecated( + ARC4, + __name__, + "ARC4 has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and " + "will be removed from " + "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", + utils.DeprecatedIn43, + name="ARC4", +) + + +utils.deprecated( + TripleDES, + __name__, + "TripleDES has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and " + "will be removed from " + "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", + utils.DeprecatedIn43, + name="TripleDES", +) + + +class ChaCha20(CipherAlgorithm): + name = "ChaCha20" + key_sizes = frozenset([256]) + + def __init__(self, key: utils.Buffer, nonce: utils.Buffer): + self.key = _verify_key_size(self, key) + 
utils._check_byteslike("nonce", nonce) + + if len(nonce) != 16: + raise ValueError("nonce must be 128-bits (16 bytes)") + + self._nonce = nonce + + @property + def nonce(self) -> utils.Buffer: + return self._nonce + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SM4(BlockCipherAlgorithm): + name = "SM4" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/base.py new file mode 100644 index 0000000..24fceea --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ -0,0 +1,146 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm +from cryptography.hazmat.primitives.ciphers import modes +from cryptography.utils import Buffer + + +class CipherContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: Buffer) -> bytes: + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def update_into(self, data: Buffer, buf: Buffer) -> int: + """ + Processes the provided bytes and writes the resulting data into the + provided buffer. Returns the number of bytes written. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Returns the results of processing the final block as bytes. + """ + + @abc.abstractmethod + def reset_nonce(self, nonce: bytes) -> None: + """ + Resets the nonce for the cipher context to the provided value. + Raises an exception if it does not support reset or if the + provided nonce does not have a valid length. + """ + + +class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def authenticate_additional_data(self, data: Buffer) -> None: + """ + Authenticates the provided bytes. + """ + + +class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def finalize_with_tag(self, tag: bytes) -> bytes: + """ + Returns the results of processing the final block as bytes and allows + delayed passing of the authentication tag. + """ + + +class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tag(self) -> bytes: + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ + + +Mode = typing.TypeVar( + "Mode", bound=typing.Optional[modes.Mode], covariant=True +) + + +class Cipher(typing.Generic[Mode]): + def __init__( + self, + algorithm: CipherAlgorithm, + mode: Mode, + backend: typing.Any = None, + ) -> None: + if not isinstance(algorithm, CipherAlgorithm): + raise TypeError("Expected interface of CipherAlgorithm.") + + if mode is not None: + # mypy needs this assert to narrow the type from our generic + # type. Maybe it won't some time in the future. 
+ assert isinstance(mode, modes.Mode) + mode.validate_for_algorithm(algorithm) + + self.algorithm = algorithm + self.mode = mode + + @typing.overload + def encryptor( + self: Cipher[modes.ModeWithAuthenticationTag], + ) -> AEADEncryptionContext: ... + + @typing.overload + def encryptor( + self: _CIPHER_TYPE, + ) -> CipherContext: ... + + def encryptor(self): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if self.mode.tag is not None: + raise ValueError( + "Authentication tag must be None when encrypting." + ) + + return rust_openssl.ciphers.create_encryption_ctx( + self.algorithm, self.mode + ) + + @typing.overload + def decryptor( + self: Cipher[modes.ModeWithAuthenticationTag], + ) -> AEADDecryptionContext: ... + + @typing.overload + def decryptor( + self: _CIPHER_TYPE, + ) -> CipherContext: ... + + def decryptor(self): + return rust_openssl.ciphers.create_decryption_ctx( + self.algorithm, self.mode + ) + + +_CIPHER_TYPE = Cipher[ + typing.Union[ + modes.ModeWithNonce, + modes.ModeWithTweak, + modes.ECB, + modes.ModeWithInitializationVector, + None, + ] +] + +CipherContext.register(rust_openssl.ciphers.CipherContext) +AEADEncryptionContext.register(rust_openssl.ciphers.AEADEncryptionContext) +AEADDecryptionContext.register(rust_openssl.ciphers.AEADDecryptionContext) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 0000000..36c555c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1,268 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers import algorithms + + +class Mode(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. + """ + + +class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def initialization_vector(self) -> utils.Buffer: + """ + The value of the initialization vector for this mode as bytes. + """ + + +class ModeWithTweak(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tweak(self) -> utils.Buffer: + """ + The value of the tweak for this mode as bytes. + """ + + +class ModeWithNonce(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def nonce(self) -> utils.Buffer: + """ + The value of the nonce for this mode as bytes. + """ + + +class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tag(self) -> bytes | None: + """ + The value of the tag supplied to the constructor of this mode. 
+ """ + + +def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None: + if algorithm.key_size > 256 and algorithm.name == "AES": + raise ValueError( + "Only 128, 192, and 256 bit keys are allowed for this AES mode" + ) + + +def _check_iv_length( + self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm +) -> None: + iv_len = len(self.initialization_vector) + if iv_len * 8 != algorithm.block_size: + raise ValueError(f"Invalid IV size ({iv_len}) for {self.name}.") + + +def _check_nonce_length( + nonce: utils.Buffer, name: str, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{name} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + if len(nonce) * 8 != algorithm.block_size: + raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.") + + +def _check_iv_and_key_length( + self: ModeWithInitializationVector, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{self} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + _check_aes_key_length(self, algorithm) + _check_iv_length(self, algorithm) + + +class CBC(ModeWithInitializationVector): + name = "CBC" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class XTS(ModeWithTweak): + name = "XTS" + + def __init__(self, tweak: utils.Buffer): + utils._check_byteslike("tweak", tweak) + + if len(tweak) != 16: + raise ValueError("tweak must be 128-bits (16 bytes)") + + self._tweak = tweak + + @property + def tweak(self) -> utils.Buffer: + return self._tweak + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)): + raise TypeError( + "The AES128 and AES256 classes do not support XTS, please use " + "the standard AES class instead." 
+ ) + + if algorithm.key_size not in (256, 512): + raise ValueError( + "The XTS specification requires a 256-bit key for AES-128-XTS" + " and 512-bit key for AES-256-XTS" + ) + + +class ECB(Mode): + name = "ECB" + + validate_for_algorithm = _check_aes_key_length + + +class OFB(ModeWithInitializationVector): + name = "OFB" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB(ModeWithInitializationVector): + name = "CFB" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB8(ModeWithInitializationVector): + name = "CFB8" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CTR(ModeWithNonce): + name = "CTR" + + def __init__(self, nonce: utils.Buffer): + utils._check_byteslike("nonce", nonce) + self._nonce = nonce + + @property + def nonce(self) -> utils.Buffer: + return self._nonce + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + _check_nonce_length(self.nonce, self.name, algorithm) + + +class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8 + _MAX_AAD_BYTES = (2**64) // 8 + + def __init__( + self, + initialization_vector: utils.Buffer, + tag: bytes | None = None, + min_tag_length: int = 16, + ): + # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive + # This is a sane limit anyway so we'll enforce it here. + utils._check_byteslike("initialization_vector", initialization_vector) + if len(initialization_vector) < 8 or len(initialization_vector) > 128: + raise ValueError( + "initialization_vector must be between 8 and 128 bytes (64 " + "and 1024 bits)." + ) + self._initialization_vector = initialization_vector + if tag is not None: + utils._check_bytes("tag", tag) + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if len(tag) < min_tag_length: + raise ValueError( + f"Authentication tag must be {min_tag_length} bytes or " + "longer." 
+ ) + self._tag = tag + self._min_tag_length = min_tag_length + + @property + def tag(self) -> bytes | None: + return self._tag + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + "GCM requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + block_size_bytes = algorithm.block_size // 8 + if self._tag is not None and len(self._tag) > block_size_bytes: + raise ValueError( + f"Authentication tag cannot be more than {block_size_bytes} " + "bytes." + ) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/cmac.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/cmac.py new file mode 100644 index 0000000..2c67ce2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/cmac.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = ["CMAC"] +CMAC = rust_openssl.cmac.CMAC diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/constant_time.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/constant_time.py new file mode 100644 index 0000000..3975c71 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import hmac + + +def bytes_eq(a: bytes, b: bytes) -> bool: + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return hmac.compare_digest(a, b) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/hashes.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/hashes.py new file mode 100644 index 0000000..4b55ec3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1,246 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.utils import Buffer + +__all__ = [ + "MD5", + "SHA1", + "SHA3_224", + "SHA3_256", + "SHA3_384", + "SHA3_512", + "SHA224", + "SHA256", + "SHA384", + "SHA512", + "SHA512_224", + "SHA512_256", + "SHAKE128", + "SHAKE256", + "SM3", + "BLAKE2b", + "BLAKE2s", + "ExtendableOutputFunction", + "Hash", + "HashAlgorithm", + "HashContext", + "XOFHash", +] + + +class HashAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this algorithm (e.g. "sha256", "md5"). + """ + + @property + @abc.abstractmethod + def digest_size(self) -> int: + """ + The size of the resulting digest in bytes. 
+ """ + + @property + @abc.abstractmethod + def block_size(self) -> int | None: + """ + The internal block size of the hash function, or None if the hash + function does not use blocks internally (e.g. SHA3). + """ + + +class HashContext(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def algorithm(self) -> HashAlgorithm: + """ + A HashAlgorithm that will be used by this context. + """ + + @abc.abstractmethod + def update(self, data: Buffer) -> None: + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self) -> HashContext: + """ + Return a HashContext that is a copy of the current context. + """ + + +Hash = rust_openssl.hashes.Hash +HashContext.register(Hash) + +XOFHash = rust_openssl.hashes.XOFHash + + +class ExtendableOutputFunction(metaclass=abc.ABCMeta): + """ + An interface for extendable output functions. + """ + + +class SHA1(HashAlgorithm): + name = "sha1" + digest_size = 20 + block_size = 64 + + +class SHA512_224(HashAlgorithm): # noqa: N801 + name = "sha512-224" + digest_size = 28 + block_size = 128 + + +class SHA512_256(HashAlgorithm): # noqa: N801 + name = "sha512-256" + digest_size = 32 + block_size = 128 + + +class SHA224(HashAlgorithm): + name = "sha224" + digest_size = 28 + block_size = 64 + + +class SHA256(HashAlgorithm): + name = "sha256" + digest_size = 32 + block_size = 64 + + +class SHA384(HashAlgorithm): + name = "sha384" + digest_size = 48 + block_size = 128 + + +class SHA512(HashAlgorithm): + name = "sha512" + digest_size = 64 + block_size = 128 + + +class SHA3_224(HashAlgorithm): # noqa: N801 + name = "sha3-224" + digest_size = 28 + block_size = None + + +class SHA3_256(HashAlgorithm): # noqa: N801 + name = "sha3-256" + digest_size = 32 + block_size = None + + +class SHA3_384(HashAlgorithm): # noqa: N801 + name = "sha3-384" + digest_size = 48 + block_size = None + + +class SHA3_512(HashAlgorithm): # noqa: N801 + name = "sha3-512" + digest_size = 64 + block_size = None + + +class SHAKE128(HashAlgorithm, ExtendableOutputFunction): + name = "shake128" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SHAKE256(HashAlgorithm, ExtendableOutputFunction): + name = "shake256" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class MD5(HashAlgorithm): + name = "md5" + digest_size = 16 + block_size = 64 + + +class BLAKE2b(HashAlgorithm): + name = "blake2b" + _max_digest_size = 64 + _min_digest_size = 1 + block_size = 128 + + def __init__(self, digest_size: int): + if digest_size != 64: + raise ValueError("Digest size must be 64") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class BLAKE2s(HashAlgorithm): + name = "blake2s" + block_size = 64 + _max_digest_size = 32 + _min_digest_size = 1 + + def __init__(self, digest_size: 
int): + if digest_size != 32: + raise ValueError("Digest size must be 32") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SM3(HashAlgorithm): + name = "sm3" + digest_size = 32 + block_size = 64 diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/hmac.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/hmac.py new file mode 100644 index 0000000..a9442d5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import hashes + +__all__ = ["HMAC"] + +HMAC = rust_openssl.hmac.HMAC +hashes.HashContext.register(HMAC) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..79bb459 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + + +class KeyDerivationFunction(metaclass=abc.ABCMeta): + @abc.abstractmethod + def derive(self, key_material: bytes) -> bytes: + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material: bytes, expected_key: bytes) -> None: + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. 
+ """ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..3d455fc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-39.pyc new file mode 100644 index 0000000..b823d9c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-39.pyc new file mode 100644 index 0000000..0702dcb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-39.pyc new file mode 100644 index 0000000..9caa46c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-39.pyc new file mode 100644 index 0000000..9727994 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-39.pyc new file mode 100644 index 0000000..6d6b771 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-39.pyc new file mode 100644 index 0000000..893bf5f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-39.pyc new file mode 100644 index 0000000..33c948f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/argon2.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/argon2.py new file mode 100644 index 0000000..405fc8d --- /dev/null +++ 
b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/argon2.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +Argon2id = rust_openssl.kdf.Argon2id +KeyDerivationFunction.register(Argon2id) + +__all__ = ["Argon2id"] diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 0000000..1b92841 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing +from collections.abc import Callable + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized, InvalidKey +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n: int) -> bytes: + return n.to_bytes(length=4, byteorder="big") + + +def _common_args_checks( + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: bytes | None, +) -> None: + max_length = algorithm.digest_size * (2**32 - 1) + if length > max_length: + raise ValueError(f"Cannot derive keys larger than {max_length} bits.") + if otherinfo is not None: + utils._check_bytes("otherinfo", otherinfo) + + +def _concatkdf_derive( + key_material: utils.Buffer, + length: int, + auxfn: Callable[[], hashes.HashContext], + otherinfo: bytes, +) -> bytes: + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while length > outlen: + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +class ConcatKDFHash(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: bytes | None, + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" + + self._used = False + + def _hash(self) -> hashes.Hash: + return hashes.Hash(self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hash, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class ConcatKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes | None, + otherinfo: bytes | None, + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None 
else b"" + + if algorithm.block_size is None: + raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF") + + if salt is None: + salt = b"\x00" * algorithm.block_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + self._used = False + + def _hmac(self) -> hmac.HMAC: + return hmac.HMAC(self._salt, self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hmac, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 100644 index 0000000..1e162d9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1,16 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +HKDF = rust_openssl.kdf.HKDF +HKDFExpand = rust_openssl.kdf.HKDFExpand + +KeyDerivationFunction.register(HKDF) +KeyDerivationFunction.register(HKDFExpand) + +__all__ = ["HKDF", "HKDFExpand"] diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py new file mode 100644 index 0000000..5b47137 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py @@ -0,0 +1,303 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
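ConcatKDFHash above implements the concatenation KDF (SP 800-56A style): each round hashes a 32-bit big-endian counter, then the key material, then otherinfo, and the digests are concatenated and truncated to length. A minimal usage sketch with illustrative inputs:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash

shared_secret = b"\x01" * 32  # e.g. the raw output of an (EC)DH exchange
ckdf = ConcatKDFHash(algorithm=hashes.SHA256(), length=32, otherinfo=b"handshake v1")
derived = ckdf.derive(shared_secret)

# Instances are single-use; build a fresh one to verify a previously derived key.
ConcatKDFHash(hashes.SHA256(), 32, b"handshake v1").verify(shared_secret, derived)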
+ +from __future__ import annotations + +import typing +from collections.abc import Callable + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.primitives import ( + ciphers, + cmac, + constant_time, + hashes, + hmac, +) +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class Mode(utils.Enum): + CounterMode = "ctr" + + +class CounterLocation(utils.Enum): + BeforeFixed = "before_fixed" + AfterFixed = "after_fixed" + MiddleFixed = "middle_fixed" + + +class _KBKDFDeriver: + def __init__( + self, + prf: Callable, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + break_location: int | None, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + ): + assert callable(prf) + + if not isinstance(mode, Mode): + raise TypeError("mode must be of type Mode") + + if not isinstance(location, CounterLocation): + raise TypeError("location must be of type CounterLocation") + + if break_location is None and location is CounterLocation.MiddleFixed: + raise ValueError("Please specify a break_location") + + if ( + break_location is not None + and location != CounterLocation.MiddleFixed + ): + raise ValueError( + "break_location is ignored when location is not" + " CounterLocation.MiddleFixed" + ) + + if break_location is not None and not isinstance(break_location, int): + raise TypeError("break_location must be an integer") + + if break_location is not None and break_location < 0: + raise ValueError("break_location must be a positive integer") + + if (label or context) and fixed: + raise ValueError( + "When supplying fixed data, label and context are ignored." + ) + + if rlen is None or not self._valid_byte_length(rlen): + raise ValueError("rlen must be between 1 and 4") + + if llen is None and fixed is None: + raise ValueError("Please specify an llen") + + if llen is not None and not isinstance(llen, int): + raise TypeError("llen must be an integer") + + if llen == 0: + raise ValueError("llen must be non-zero") + + if label is None: + label = b"" + + if context is None: + context = b"" + + utils._check_bytes("label", label) + utils._check_bytes("context", context) + self._prf = prf + self._mode = mode + self._length = length + self._rlen = rlen + self._llen = llen + self._location = location + self._break_location = break_location + self._label = label + self._context = context + self._used = False + self._fixed_data = fixed + + @staticmethod + def _valid_byte_length(value: int) -> bool: + if not isinstance(value, int): + raise TypeError("value must be of type int") + + value_bin = utils.int_to_bytes(1, value) + return 1 <= len(value_bin) <= 4 + + def derive( + self, key_material: utils.Buffer, prf_output_size: int + ) -> bytes: + if self._used: + raise AlreadyFinalized + + utils._check_byteslike("key_material", key_material) + self._used = True + + # inverse floor division (equivalent to ceiling) + rounds = -(-self._length // prf_output_size) + + output = [b""] + + # For counter mode, the number of iterations shall not be + # larger than 2^r-1, where r <= 32 is the binary length of the counter + # This ensures that the counter values used as an input to the + # PRF will not repeat during a particular call to the KDF function. 
+ r_bin = utils.int_to_bytes(1, self._rlen) + if rounds > pow(2, len(r_bin) * 8) - 1: + raise ValueError("There are too many iterations.") + + fixed = self._generate_fixed_input() + + if self._location == CounterLocation.BeforeFixed: + data_before_ctr = b"" + data_after_ctr = fixed + elif self._location == CounterLocation.AfterFixed: + data_before_ctr = fixed + data_after_ctr = b"" + else: + if isinstance( + self._break_location, int + ) and self._break_location > len(fixed): + raise ValueError("break_location offset > len(fixed)") + data_before_ctr = fixed[: self._break_location] + data_after_ctr = fixed[self._break_location :] + + for i in range(1, rounds + 1): + h = self._prf(key_material) + + counter = utils.int_to_bytes(i, self._rlen) + input_data = data_before_ctr + counter + data_after_ctr + + h.update(input_data) + + output.append(h.finalize()) + + return b"".join(output)[: self._length] + + def _generate_fixed_input(self) -> bytes: + if self._fixed_data and isinstance(self._fixed_data, bytes): + return self._fixed_data + + l_val = utils.int_to_bytes(self._length * 8, self._llen) + + return b"".join([self._label, b"\x00", self._context, l_val]) + + +class KBKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + backend: typing.Any = None, + *, + break_location: int | None = None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hash algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hmac algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + self._algorithm = algorithm + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + break_location, + label, + context, + fixed, + ) + + def _prf(self, key_material: bytes) -> hmac.HMAC: + return hmac.HMAC(key_material, self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + return self._deriver.derive(key_material, self._algorithm.digest_size) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class KBKDFCMAC(KeyDerivationFunction): + def __init__( + self, + algorithm, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + backend: typing.Any = None, + *, + break_location: int | None = None, + ): + if not issubclass( + algorithm, ciphers.BlockCipherAlgorithm + ) or not issubclass(algorithm, ciphers.CipherAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + self._algorithm = algorithm + self._cipher: ciphers.BlockCipherAlgorithm | None = None + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + break_location, + label, + context, + fixed, + ) + + def _prf(self, _: bytes) -> cmac.CMAC: + assert self._cipher is not None + + return cmac.CMAC(self._cipher) + + def derive(self, key_material: utils.Buffer) -> bytes: + self._cipher = self._algorithm(key_material) + + assert 
self._cipher is not None + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.cmac_algorithm_supported(self._cipher): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + return self._deriver.derive(key_material, self._cipher.block_size // 8) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 0000000..d539f13 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,62 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class PBKDF2HMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes, + iterations: int, + backend: typing.Any = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + f"{algorithm.name} is not supported for PBKDF2.", + _Reasons.UNSUPPORTED_HASH, + ) + self._used = False + self._algorithm = algorithm + self._length = length + utils._check_bytes("salt", salt) + self._salt = salt + self._iterations = iterations + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once.") + self._used = True + + return rust_openssl.kdf.derive_pbkdf2_hmac( + key_material, + self._algorithm, + self._salt, + self._iterations, + self._length, + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py new file mode 100644 index 0000000..f791cee --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py @@ -0,0 +1,19 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
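PBKDF2HMAC above does its argument checking in Python and hands the actual derivation to rust_openssl.kdf.derive_pbkdf2_hmac. A minimal usage sketch; the salt size and iteration count are illustrative choices, not values this module prescribes:

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=600_000)
key = kdf.derive(b"correct horse battery staple")

# Each instance can only be used once; create a new one to verify.
PBKDF2HMAC(hashes.SHA256(), 32, salt, 600_000).verify(b"correct horse battery staple", key)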
+
+from __future__ import annotations
+
+import sys
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+# This is used by the scrypt tests to skip tests that require more memory
+# than the MEM_LIMIT
+_MEM_LIMIT = sys.maxsize // 2
+
+Scrypt = rust_openssl.kdf.Scrypt
+KeyDerivationFunction.register(Scrypt)
+
+__all__ = ["Scrypt"]
diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py
new file mode 100644
index 0000000..63870cd
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n: int) -> bytes:
+    return n.to_bytes(length=4, byteorder="big")
+
+
+class X963KDF(KeyDerivationFunction):
+    def __init__(
+        self,
+        algorithm: hashes.HashAlgorithm,
+        length: int,
+        sharedinfo: bytes | None,
+        backend: typing.Any = None,
+    ):
+        max_len = algorithm.digest_size * (2**32 - 1)
+        if length > max_len:
+            raise ValueError(f"Cannot derive keys larger than {max_len} bits.")
+        if sharedinfo is not None:
+            utils._check_bytes("sharedinfo", sharedinfo)
+
+        self._algorithm = algorithm
+        self._length = length
+        self._sharedinfo = sharedinfo
+        self._used = False
+
+    def derive(self, key_material: utils.Buffer) -> bytes:
+        if self._used:
+            raise AlreadyFinalized
+        self._used = True
+        utils._check_byteslike("key_material", key_material)
+        output = [b""]
+        outlen = 0
+        counter = 1
+
+        while self._length > outlen:
+            h = hashes.Hash(self._algorithm)
+            h.update(key_material)
+            h.update(_int_to_u32be(counter))
+            if self._sharedinfo is not None:
+                h.update(self._sharedinfo)
+            output.append(h.finalize())
+            outlen += len(output[-1])
+            counter += 1
+
+        return b"".join(output)[: self._length]
+
+    def verify(self, key_material: bytes, expected_key: bytes) -> None:
+        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+            raise InvalidKey
diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/keywrap.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/keywrap.py
new file mode 100644
index 0000000..b93d87d
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/keywrap.py
@@ -0,0 +1,177 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
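X963KDF above is the ANSI X9.63 construction: each round hashes the key material, a 32-bit big-endian counter, and the optional sharedinfo, concatenating the digests until length bytes are available. A minimal usage sketch with illustrative inputs:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

shared_secret = b"\x02" * 32  # e.g. an ECDH shared secret
xkdf = X963KDF(algorithm=hashes.SHA256(), length=32, sharedinfo=b"session keys")
key = xkdf.derive(shared_secret)

# Like the other KDFs, instances are single-use; verify with a fresh one.
X963KDF(hashes.SHA256(), 32, b"session keys").verify(shared_secret, key)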
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def _wrap_core( + wrapping_key: bytes, + a: bytes, + r: list[bytes], +) -> bytes: + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + a = ( + int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_wrap( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, a, r) + + +def _unwrap_core( + wrapping_key: bytes, + a: bytes, + r: list[bytes], +) -> tuple[bytes, list[bytes]]: + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + atr = ( + int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + return a, r + + +def aes_key_wrap_with_padding( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\x59\x59\xa6" + len(key_to_wrap).to_bytes( + length=4, byteorder="big" + ) + # pad the key to wrap if necessary + pad = (8 - (len(key_to_wrap) % 8)) % 8 + key_to_wrap = key_to_wrap + b"\x00" * pad + if len(key_to_wrap) == 8: + # RFC 5649 - 4.1 - exactly 8 octets after padding + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + b = encryptor.update(aiv + key_to_wrap) + assert encryptor.finalize() == b"" + return b + else: + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, aiv, r) + + +def aes_key_unwrap_with_padding( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 16: + raise InvalidUnwrap("Must be at least 16 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(wrapped_key) == 16: + # RFC 5649 - 4.2 - exactly two 64-bit blocks + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + out = decryptor.update(wrapped_key) + assert decryptor.finalize() == b"" + a = out[:8] + data = 
out[8:] + n = 1 + else: + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + encrypted_aiv = r.pop(0) + n = len(r) + a, r = _unwrap_core(wrapping_key, encrypted_aiv, r) + data = b"".join(r) + + # 1) Check that MSB(32,A) = A65959A6. + # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let + # MLI = LSB(32,A). + # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of + # the output data are zero. + mli = int.from_bytes(a[4:], byteorder="big") + b = (8 * n) - mli + if ( + not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") + or not 8 * (n - 1) < mli <= 8 * n + or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b)) + ): + raise InvalidUnwrap() + + if b == 0: + return data + else: + return data[:-b] + + +def aes_key_unwrap( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 24: + raise InvalidUnwrap("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + a, r = _unwrap_core(wrapping_key, a, r) + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/padding.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/padding.py new file mode 100644 index 0000000..f9cd1f1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.hazmat.bindings._rust import ( + ANSIX923PaddingContext, + ANSIX923UnpaddingContext, + PKCS7PaddingContext, + PKCS7UnpaddingContext, +) + + +class PaddingContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: utils.Buffer) -> bytes: + """ + Pads the provided bytes and returns any available data as bytes. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalize the padding, returns bytes. 
+ """ + + +def _byte_padding_check(block_size: int) -> None: + if not (0 <= block_size <= 2040): + raise ValueError("block_size must be in range(0, 2041).") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8.") + + +class PKCS7: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return PKCS7PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return PKCS7UnpaddingContext(self.block_size) + + +PaddingContext.register(PKCS7PaddingContext) +PaddingContext.register(PKCS7UnpaddingContext) + + +class ANSIX923: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return ANSIX923PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return ANSIX923UnpaddingContext(self.block_size) + + +PaddingContext.register(ANSIX923PaddingContext) +PaddingContext.register(ANSIX923UnpaddingContext) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/poly1305.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/poly1305.py new file mode 100644 index 0000000..7f5a77a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/poly1305.py @@ -0,0 +1,11 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = ["Poly1305"] + +Poly1305 = rust_openssl.poly1305.Poly1305 diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__init__.py new file mode 100644 index 0000000..62283cc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__init__.py @@ -0,0 +1,65 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
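The keywrap.py module added above implements RFC 3394 / RFC 5649 AES key wrapping in Python on top of AES-ECB, while padding.py and poly1305.py are thin wrappers over the Rust bindings. A wrap/unwrap roundtrip sketch for the keywrap helpers (illustrative only; both key values are arbitrary random bytes):

    import os

    from cryptography.hazmat.primitives.keywrap import aes_key_unwrap, aes_key_wrap

    kek = os.urandom(32)  # key-encryption key: must be 16, 24, or 32 bytes
    cek = os.urandom(32)  # key to protect: at least 16 bytes and a multiple of 8

    wrapped = aes_key_wrap(kek, cek)  # output is len(cek) + 8 bytes
    assert aes_key_unwrap(kek, wrapped) == cek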
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._serialization import ( + BestAvailableEncryption, + Encoding, + KeySerializationEncryption, + NoEncryption, + ParameterFormat, + PrivateFormat, + PublicFormat, + _KeySerializationEncryption, +) +from cryptography.hazmat.primitives.serialization.base import ( + load_der_parameters, + load_der_private_key, + load_der_public_key, + load_pem_parameters, + load_pem_private_key, + load_pem_public_key, +) +from cryptography.hazmat.primitives.serialization.ssh import ( + SSHCertificate, + SSHCertificateBuilder, + SSHCertificateType, + SSHCertPrivateKeyTypes, + SSHCertPublicKeyTypes, + SSHPrivateKeyTypes, + SSHPublicKeyTypes, + load_ssh_private_key, + load_ssh_public_identity, + load_ssh_public_key, + ssh_key_fingerprint, +) + +__all__ = [ + "BestAvailableEncryption", + "Encoding", + "KeySerializationEncryption", + "NoEncryption", + "ParameterFormat", + "PrivateFormat", + "PublicFormat", + "SSHCertPrivateKeyTypes", + "SSHCertPublicKeyTypes", + "SSHCertificate", + "SSHCertificateBuilder", + "SSHCertificateType", + "SSHPrivateKeyTypes", + "SSHPublicKeyTypes", + "_KeySerializationEncryption", + "load_der_parameters", + "load_der_private_key", + "load_der_public_key", + "load_pem_parameters", + "load_pem_private_key", + "load_pem_public_key", + "load_ssh_private_key", + "load_ssh_public_identity", + "load_ssh_public_key", + "ssh_key_fingerprint", +] diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..3384add Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..9b76ffc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-39.pyc new file mode 100644 index 0000000..211b4b1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-39.pyc new file mode 100644 index 0000000..fc96dab Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-39.pyc new file mode 100644 index 0000000..4013a75 Binary files /dev/null and 
b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/base.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/base.py new file mode 100644 index 0000000..e7c998b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/base.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +load_pem_private_key = rust_openssl.keys.load_pem_private_key +load_der_private_key = rust_openssl.keys.load_der_private_key + +load_pem_public_key = rust_openssl.keys.load_pem_public_key +load_der_public_key = rust_openssl.keys.load_der_public_key + +load_pem_parameters = rust_openssl.dh.from_pem_parameters +load_der_parameters = rust_openssl.dh.from_der_parameters diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py new file mode 100644 index 0000000..58884ff --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py @@ -0,0 +1,176 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.bindings._rust import pkcs12 as rust_pkcs12 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives._serialization import PBES as PBES +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + rsa, +) +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes + +__all__ = [ + "PBES", + "PKCS12Certificate", + "PKCS12KeyAndCertificates", + "PKCS12PrivateKeyTypes", + "load_key_and_certificates", + "load_pkcs12", + "serialize_java_truststore", + "serialize_key_and_certificates", +] + +PKCS12PrivateKeyTypes = typing.Union[ + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, +] + + +PKCS12Certificate = rust_pkcs12.PKCS12Certificate + + +class PKCS12KeyAndCertificates: + def __init__( + self, + key: PrivateKeyTypes | None, + cert: PKCS12Certificate | None, + additional_certs: list[PKCS12Certificate], + ): + if key is not None and not isinstance( + key, + ( + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + ), + ): + raise TypeError( + "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" + " private key, or None." 
+            )
+        if cert is not None and not isinstance(cert, PKCS12Certificate):
+            raise TypeError("cert must be a PKCS12Certificate object or None")
+        if not all(
+            isinstance(add_cert, PKCS12Certificate)
+            for add_cert in additional_certs
+        ):
+            raise TypeError(
+                "all values in additional_certs must be PKCS12Certificate"
+                " objects"
+            )
+        self._key = key
+        self._cert = cert
+        self._additional_certs = additional_certs
+
+    @property
+    def key(self) -> PrivateKeyTypes | None:
+        return self._key
+
+    @property
+    def cert(self) -> PKCS12Certificate | None:
+        return self._cert
+
+    @property
+    def additional_certs(self) -> list[PKCS12Certificate]:
+        return self._additional_certs
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PKCS12KeyAndCertificates):
+            return NotImplemented
+
+        return (
+            self.key == other.key
+            and self.cert == other.cert
+            and self.additional_certs == other.additional_certs
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.key, self.cert, tuple(self.additional_certs)))
+
+    def __repr__(self) -> str:
+        fmt = (
+            "<PKCS12KeyAndCertificates(key={}, cert={}, additional_certs={})>"
+        )
+        return fmt.format(self.key, self.cert, self.additional_certs)
+
+
+load_key_and_certificates = rust_pkcs12.load_key_and_certificates
+load_pkcs12 = rust_pkcs12.load_pkcs12
+
+
+_PKCS12CATypes = typing.Union[
+    x509.Certificate,
+    PKCS12Certificate,
+]
+
+
+def serialize_java_truststore(
+    certs: Iterable[PKCS12Certificate],
+    encryption_algorithm: serialization.KeySerializationEncryption,
+) -> bytes:
+    if not certs:
+        raise ValueError("You must supply at least one cert")
+
+    if not isinstance(
+        encryption_algorithm, serialization.KeySerializationEncryption
+    ):
+        raise TypeError(
+            "Key encryption algorithm must be a "
+            "KeySerializationEncryption instance"
+        )
+
+    return rust_pkcs12.serialize_java_truststore(certs, encryption_algorithm)
+
+
+def serialize_key_and_certificates(
+    name: bytes | None,
+    key: PKCS12PrivateKeyTypes | None,
+    cert: x509.Certificate | None,
+    cas: Iterable[_PKCS12CATypes] | None,
+    encryption_algorithm: serialization.KeySerializationEncryption,
+) -> bytes:
+    if key is not None and not isinstance(
+        key,
+        (
+            rsa.RSAPrivateKey,
+            dsa.DSAPrivateKey,
+            ec.EllipticCurvePrivateKey,
+            ed25519.Ed25519PrivateKey,
+            ed448.Ed448PrivateKey,
+        ),
+    ):
+        raise TypeError(
+            "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
+            " private key, or None."
+        )
+
+    if not isinstance(
+        encryption_algorithm, serialization.KeySerializationEncryption
+    ):
+        raise TypeError(
+            "Key encryption algorithm must be a "
+            "KeySerializationEncryption instance"
+        )
+
+    if key is None and cert is None and not cas:
+        raise ValueError("You must supply at least one of key, cert, or cas")
+
+    return rust_pkcs12.serialize_key_and_certificates(
+        name, key, cert, cas, encryption_algorithm
+    )
diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py
new file mode 100644
index 0000000..456dc5b
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py
@@ -0,0 +1,411 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
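pkcs12.py above adds type-checked wrappers around the Rust-backed PKCS#12 routines. A serialize/parse roundtrip sketch (illustrative only; key.pem and cert.pem are hypothetical files holding a private key and its matching certificate):

    from cryptography import x509
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.serialization import pkcs12

    with open("key.pem", "rb") as f:
        key = serialization.load_pem_private_key(f.read(), password=None)
    with open("cert.pem", "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read())

    p12 = pkcs12.serialize_key_and_certificates(
        name=b"example",
        key=key,
        cert=cert,
        cas=None,
        encryption_algorithm=serialization.BestAvailableEncryption(b"passphrase"),
    )

    # Returns (private_key, certificate, additional_certificates).
    key2, cert2, extra_certs = pkcs12.load_key_and_certificates(p12, b"passphrase")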
+ +from __future__ import annotations + +import email.base64mime +import email.generator +import email.message +import email.policy +import io +import typing +from collections.abc import Iterable + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa +from cryptography.hazmat.primitives.ciphers import ( + algorithms, +) +from cryptography.utils import _check_byteslike + +load_pem_pkcs7_certificates = rust_pkcs7.load_pem_pkcs7_certificates + +load_der_pkcs7_certificates = rust_pkcs7.load_der_pkcs7_certificates + +serialize_certificates = rust_pkcs7.serialize_certificates + +PKCS7HashTypes = typing.Union[ + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +] + +PKCS7PrivateKeyTypes = typing.Union[ + rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey +] + +ContentEncryptionAlgorithm = typing.Union[ + typing.Type[algorithms.AES128], typing.Type[algorithms.AES256] +] + + +class PKCS7Options(utils.Enum): + Text = "Add text/plain MIME type" + Binary = "Don't translate input data into canonical MIME format" + DetachedSignature = "Don't embed data in the PKCS7 structure" + NoCapabilities = "Don't embed SMIME capabilities" + NoAttributes = "Don't embed authenticatedAttributes" + NoCerts = "Don't embed signer certificate" + + +class PKCS7SignatureBuilder: + def __init__( + self, + data: utils.Buffer | None = None, + signers: list[ + tuple[ + x509.Certificate, + PKCS7PrivateKeyTypes, + PKCS7HashTypes, + padding.PSS | padding.PKCS1v15 | None, + ] + ] = [], + additional_certs: list[x509.Certificate] = [], + ): + self._data = data + self._signers = signers + self._additional_certs = additional_certs + + def set_data(self, data: utils.Buffer) -> PKCS7SignatureBuilder: + _check_byteslike("data", data) + if self._data is not None: + raise ValueError("data may only be set once") + + return PKCS7SignatureBuilder(data, self._signers) + + def add_signer( + self, + certificate: x509.Certificate, + private_key: PKCS7PrivateKeyTypes, + hash_algorithm: PKCS7HashTypes, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ) -> PKCS7SignatureBuilder: + if not isinstance( + hash_algorithm, + ( + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ): + raise TypeError( + "hash_algorithm must be one of hashes.SHA224, " + "SHA256, SHA384, or SHA512" + ) + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance( + private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey) + ): + raise TypeError("Only RSA & EC keys are supported at this time.") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + return PKCS7SignatureBuilder( + self._data, + [ + *self._signers, + (certificate, private_key, hash_algorithm, rsa_padding), + ], + ) + + def add_certificate( + self, certificate: x509.Certificate + ) -> PKCS7SignatureBuilder: + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + return PKCS7SignatureBuilder( + self._data, self._signers, [*self._additional_certs, certificate] + ) + 
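+    # Illustrative note (not part of the upstream module): every method above
+    # returns a new PKCS7SignatureBuilder rather than mutating self, so the
+    # calls are normally chained, e.g.
+    #
+    #   PKCS7SignatureBuilder().set_data(b"payload").add_signer(
+    #       cert, key, hashes.SHA256()
+    #   ).sign(serialization.Encoding.SMIME, [])
+    #
+    # where cert is an x509.Certificate and key is its matching RSA or EC
+    # private key.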
+ def sign( + self, + encoding: serialization.Encoding, + options: Iterable[PKCS7Options], + backend: typing.Any = None, + ) -> bytes: + if len(self._signers) == 0: + raise ValueError("Must have at least one signer") + if self._data is None: + raise ValueError("You must add data to sign") + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Text is a meaningless option unless it is accompanied by + # DetachedSignature + if ( + PKCS7Options.Text in options + and PKCS7Options.DetachedSignature not in options + ): + raise ValueError( + "When passing the Text option you must also pass " + "DetachedSignature" + ) + + if PKCS7Options.Text in options and encoding in ( + serialization.Encoding.DER, + serialization.Encoding.PEM, + ): + raise ValueError( + "The Text option is only available for SMIME serialization" + ) + + # No attributes implies no capabilities so we'll error if you try to + # pass both. + if ( + PKCS7Options.NoAttributes in options + and PKCS7Options.NoCapabilities in options + ): + raise ValueError( + "NoAttributes is a superset of NoCapabilities. Do not pass " + "both values." + ) + + return rust_pkcs7.sign_and_serialize(self, encoding, options) + + +class PKCS7EnvelopeBuilder: + def __init__( + self, + *, + _data: bytes | None = None, + _recipients: list[x509.Certificate] | None = None, + _content_encryption_algorithm: ContentEncryptionAlgorithm + | None = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.rsa_encryption_supported(padding=padding.PKCS1v15()): + raise UnsupportedAlgorithm( + "RSA with PKCS1 v1.5 padding is not supported by this version" + " of OpenSSL.", + _Reasons.UNSUPPORTED_PADDING, + ) + self._data = _data + self._recipients = _recipients if _recipients is not None else [] + self._content_encryption_algorithm = _content_encryption_algorithm + + def set_data(self, data: bytes) -> PKCS7EnvelopeBuilder: + _check_byteslike("data", data) + if self._data is not None: + raise ValueError("data may only be set once") + + return PKCS7EnvelopeBuilder( + _data=data, + _recipients=self._recipients, + _content_encryption_algorithm=self._content_encryption_algorithm, + ) + + def add_recipient( + self, + certificate: x509.Certificate, + ) -> PKCS7EnvelopeBuilder: + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance(certificate.public_key(), rsa.RSAPublicKey): + raise TypeError("Only RSA keys are supported at this time.") + + return PKCS7EnvelopeBuilder( + _data=self._data, + _recipients=[ + *self._recipients, + certificate, + ], + _content_encryption_algorithm=self._content_encryption_algorithm, + ) + + def set_content_encryption_algorithm( + self, content_encryption_algorithm: ContentEncryptionAlgorithm + ) -> PKCS7EnvelopeBuilder: + if self._content_encryption_algorithm is not None: + raise ValueError("Content encryption algo may only be set once") + if content_encryption_algorithm not in { + algorithms.AES128, + algorithms.AES256, + }: + raise TypeError("Only AES128 and AES256 are supported") + + return PKCS7EnvelopeBuilder( + _data=self._data, + _recipients=self._recipients, + _content_encryption_algorithm=content_encryption_algorithm, 
+ ) + + def encrypt( + self, + encoding: serialization.Encoding, + options: Iterable[PKCS7Options], + ) -> bytes: + if len(self._recipients) == 0: + raise ValueError("Must have at least one recipient") + if self._data is None: + raise ValueError("You must add data to encrypt") + + # The default content encryption algorithm is AES-128, which the S/MIME + # v3.2 RFC specifies as MUST support (https://datatracker.ietf.org/doc/html/rfc5751#section-2.7) + content_encryption_algorithm = ( + self._content_encryption_algorithm or algorithms.AES128 + ) + + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Only allow options that make sense for encryption + if any( + opt not in [PKCS7Options.Text, PKCS7Options.Binary] + for opt in options + ): + raise ValueError( + "Only the following options are supported for encryption: " + "Text, Binary" + ) + elif PKCS7Options.Text in options and PKCS7Options.Binary in options: + # OpenSSL accepts both options at the same time, but ignores Text. + # We fail defensively to avoid unexpected outputs. + raise ValueError( + "Cannot use Binary and Text options at the same time" + ) + + return rust_pkcs7.encrypt_and_serialize( + self, content_encryption_algorithm, encoding, options + ) + + +pkcs7_decrypt_der = rust_pkcs7.decrypt_der +pkcs7_decrypt_pem = rust_pkcs7.decrypt_pem +pkcs7_decrypt_smime = rust_pkcs7.decrypt_smime + + +def _smime_signed_encode( + data: bytes, signature: bytes, micalg: str, text_mode: bool +) -> bytes: + # This function works pretty hard to replicate what OpenSSL does + # precisely. For good and for ill. 
+ + m = email.message.Message() + m.add_header("MIME-Version", "1.0") + m.add_header( + "Content-Type", + "multipart/signed", + protocol="application/x-pkcs7-signature", + micalg=micalg, + ) + + m.preamble = "This is an S/MIME signed message\n" + + msg_part = OpenSSLMimePart() + msg_part.set_payload(data) + if text_mode: + msg_part.add_header("Content-Type", "text/plain") + m.attach(msg_part) + + sig_part = email.message.MIMEPart() + sig_part.add_header( + "Content-Type", "application/x-pkcs7-signature", name="smime.p7s" + ) + sig_part.add_header("Content-Transfer-Encoding", "base64") + sig_part.add_header( + "Content-Disposition", "attachment", filename="smime.p7s" + ) + sig_part.set_payload( + email.base64mime.body_encode(signature, maxlinelen=65) + ) + del sig_part["MIME-Version"] + m.attach(sig_part) + + fp = io.BytesIO() + g = email.generator.BytesGenerator( + fp, + maxheaderlen=0, + mangle_from_=False, + policy=m.policy.clone(linesep="\r\n"), + ) + g.flatten(m) + return fp.getvalue() + + +def _smime_enveloped_encode(data: bytes) -> bytes: + m = email.message.Message() + m.add_header("MIME-Version", "1.0") + m.add_header("Content-Disposition", "attachment", filename="smime.p7m") + m.add_header( + "Content-Type", + "application/pkcs7-mime", + smime_type="enveloped-data", + name="smime.p7m", + ) + m.add_header("Content-Transfer-Encoding", "base64") + + m.set_payload(email.base64mime.body_encode(data, maxlinelen=65)) + + return m.as_bytes(policy=m.policy.clone(linesep="\n", max_line_length=0)) + + +def _smime_enveloped_decode(data: bytes) -> bytes: + m = email.message_from_bytes(data) + if m.get_content_type() not in { + "application/x-pkcs7-mime", + "application/pkcs7-mime", + }: + raise ValueError("Not an S/MIME enveloped message") + return bytes(m.get_payload(decode=True)) + + +def _smime_remove_text_headers(data: bytes) -> bytes: + m = email.message_from_bytes(data) + # Using get() instead of get_content_type() since it has None as default, + # where the latter has "text/plain". Both methods are case-insensitive. + content_type = m.get("content-type") + if content_type is None: + raise ValueError( + "Decrypted MIME data has no 'Content-Type' header. " + "Please remove the 'Text' option to parse it manually." + ) + if "text/plain" not in content_type: + raise ValueError( + f"Decrypted MIME data content type is '{content_type}', not " + "'text/plain'. Remove the 'Text' option to parse it manually." + ) + return bytes(m.get_payload(decode=True)) + + +class OpenSSLMimePart(email.message.MIMEPart): + # A MIMEPart subclass that replicates OpenSSL's behavior of not including + # a newline if there are no headers. + def _write_headers(self, generator) -> None: + if list(self.raw_items()): + generator._write_headers(self) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/ssh.py new file mode 100644 index 0000000..cb10cf8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/ssh.py @@ -0,0 +1,1619 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
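pkcs7.py above provides builder-style PKCS#7 / S/MIME signing and enveloping, with the _smime_* helpers reproducing OpenSSL's MIME layout. A signing sketch (illustrative only; signer.pem and signer_key.pem are hypothetical files containing a certificate and its matching RSA or EC private key):

    from cryptography import x509
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.serialization import pkcs7

    with open("signer.pem", "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read())
    with open("signer_key.pem", "rb") as f:
        key = serialization.load_pem_private_key(f.read(), password=None)

    # Produce an S/MIME message with a detached signature over the payload.
    smime_signature = (
        pkcs7.PKCS7SignatureBuilder()
        .set_data(b"hello world")
        .add_signer(cert, key, hashes.SHA256())
        .sign(serialization.Encoding.SMIME, [pkcs7.PKCS7Options.DetachedSignature])
    )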
+ +from __future__ import annotations + +import binascii +import enum +import os +import re +import typing +import warnings +from base64 import encodebytes as _base64_encode +from dataclasses import dataclass + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed25519, + padding, + rsa, +) +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.hazmat.primitives.ciphers import ( + AEADDecryptionContext, + Cipher, + algorithms, + modes, +) +from cryptography.hazmat.primitives.serialization import ( + Encoding, + KeySerializationEncryption, + NoEncryption, + PrivateFormat, + PublicFormat, + _KeySerializationEncryption, +) + +try: + from bcrypt import kdf as _bcrypt_kdf + + _bcrypt_supported = True +except ImportError: + _bcrypt_supported = False + + def _bcrypt_kdf( + password: bytes, + salt: bytes, + desired_key_bytes: int, + rounds: int, + ignore_few_rounds: bool = False, + ) -> bytes: + raise UnsupportedAlgorithm("Need bcrypt module") + + +_SSH_ED25519 = b"ssh-ed25519" +_SSH_RSA = b"ssh-rsa" +_SSH_DSA = b"ssh-dss" +_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256" +_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384" +_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521" +_CERT_SUFFIX = b"-cert-v01@openssh.com" + +# U2F application string suffixed pubkey +_SK_SSH_ED25519 = b"sk-ssh-ed25519@openssh.com" +_SK_SSH_ECDSA_NISTP256 = b"sk-ecdsa-sha2-nistp256@openssh.com" + +# These are not key types, only algorithms, so they cannot appear +# as a public key type +_SSH_RSA_SHA256 = b"rsa-sha2-256" +_SSH_RSA_SHA512 = b"rsa-sha2-512" + +_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") +_SK_MAGIC = b"openssh-key-v1\0" +_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----" +_SK_END = b"-----END OPENSSH PRIVATE KEY-----" +_BCRYPT = b"bcrypt" +_NONE = b"none" +_DEFAULT_CIPHER = b"aes256-ctr" +_DEFAULT_ROUNDS = 16 + +# re is only way to work on bytes-like data +_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL) + +# padding for max blocksize +_PADDING = memoryview(bytearray(range(1, 1 + 16))) + + +@dataclass +class _SSHCipher: + alg: type[algorithms.AES] + key_len: int + mode: type[modes.CTR] | type[modes.CBC] | type[modes.GCM] + block_len: int + iv_len: int + tag_len: int | None + is_aead: bool + + +# ciphers that are actually used in key wrapping +_SSH_CIPHERS: dict[bytes, _SSHCipher] = { + b"aes256-ctr": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.CTR, + block_len=16, + iv_len=16, + tag_len=None, + is_aead=False, + ), + b"aes256-cbc": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.CBC, + block_len=16, + iv_len=16, + tag_len=None, + is_aead=False, + ), + b"aes256-gcm@openssh.com": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.GCM, + block_len=16, + iv_len=12, + tag_len=16, + is_aead=True, + ), +} + +# map local curve name to key type +_ECDSA_KEY_TYPE = { + "secp256r1": _ECDSA_NISTP256, + "secp384r1": _ECDSA_NISTP384, + "secp521r1": _ECDSA_NISTP521, +} + + +def _get_ssh_key_type(key: SSHPrivateKeyTypes | SSHPublicKeyTypes) -> bytes: + if isinstance(key, ec.EllipticCurvePrivateKey): + key_type = _ecdsa_key_type(key.public_key()) + elif isinstance(key, ec.EllipticCurvePublicKey): + key_type = _ecdsa_key_type(key) + elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + key_type = _SSH_RSA + elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)): + key_type = _SSH_DSA + 
elif isinstance( + key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey) + ): + key_type = _SSH_ED25519 + else: + raise ValueError("Unsupported key type") + + return key_type + + +def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes: + """Return SSH key_type and curve_name for private key.""" + curve = public_key.curve + if curve.name not in _ECDSA_KEY_TYPE: + raise ValueError( + f"Unsupported curve for ssh private key: {curve.name!r}" + ) + return _ECDSA_KEY_TYPE[curve.name] + + +def _ssh_pem_encode( + data: utils.Buffer, + prefix: bytes = _SK_START + b"\n", + suffix: bytes = _SK_END + b"\n", +) -> bytes: + return b"".join([prefix, _base64_encode(data), suffix]) + + +def _check_block_size(data: utils.Buffer, block_len: int) -> None: + """Require data to be full blocks""" + if not data or len(data) % block_len != 0: + raise ValueError("Corrupt data: missing padding") + + +def _check_empty(data: utils.Buffer) -> None: + """All data should have been parsed.""" + if data: + raise ValueError("Corrupt data: unparsed data") + + +def _init_cipher( + ciphername: bytes, + password: bytes | None, + salt: bytes, + rounds: int, +) -> Cipher[modes.CBC | modes.CTR | modes.GCM]: + """Generate key + iv and return cipher.""" + if not password: + raise TypeError( + "Key is password-protected, but password was not provided." + ) + + ciph = _SSH_CIPHERS[ciphername] + seed = _bcrypt_kdf( + password, salt, ciph.key_len + ciph.iv_len, rounds, True + ) + return Cipher( + ciph.alg(seed[: ciph.key_len]), + ciph.mode(seed[ciph.key_len :]), + ) + + +def _get_u32(data: memoryview) -> tuple[int, memoryview]: + """Uint32""" + if len(data) < 4: + raise ValueError("Invalid data") + return int.from_bytes(data[:4], byteorder="big"), data[4:] + + +def _get_u64(data: memoryview) -> tuple[int, memoryview]: + """Uint64""" + if len(data) < 8: + raise ValueError("Invalid data") + return int.from_bytes(data[:8], byteorder="big"), data[8:] + + +def _get_sshstr(data: memoryview) -> tuple[memoryview, memoryview]: + """Bytes with u32 length prefix""" + n, data = _get_u32(data) + if n > len(data): + raise ValueError("Invalid data") + return data[:n], data[n:] + + +def _get_mpint(data: memoryview) -> tuple[int, memoryview]: + """Big integer.""" + val, data = _get_sshstr(data) + if val and val[0] > 0x7F: + raise ValueError("Invalid data") + return int.from_bytes(val, "big"), data + + +def _to_mpint(val: int) -> bytes: + """Storage format for signed bigint.""" + if val < 0: + raise ValueError("negative mpint not allowed") + if not val: + return b"" + nbytes = (val.bit_length() + 8) // 8 + return utils.int_to_bytes(val, nbytes) + + +class _FragList: + """Build recursive structure without data copy.""" + + flist: list[utils.Buffer] + + def __init__(self, init: list[utils.Buffer] | None = None) -> None: + self.flist = [] + if init: + self.flist.extend(init) + + def put_raw(self, val: utils.Buffer) -> None: + """Add plain bytes""" + self.flist.append(val) + + def put_u32(self, val: int) -> None: + """Big-endian uint32""" + self.flist.append(val.to_bytes(length=4, byteorder="big")) + + def put_u64(self, val: int) -> None: + """Big-endian uint64""" + self.flist.append(val.to_bytes(length=8, byteorder="big")) + + def put_sshstr(self, val: bytes | _FragList) -> None: + """Bytes prefixed with u32 length""" + if isinstance(val, (bytes, memoryview, bytearray)): + self.put_u32(len(val)) + self.flist.append(val) + else: + self.put_u32(val.size()) + self.flist.extend(val.flist) + + def put_mpint(self, val: int) -> None: + 
"""Big-endian bigint prefixed with u32 length""" + self.put_sshstr(_to_mpint(val)) + + def size(self) -> int: + """Current number of bytes""" + return sum(map(len, self.flist)) + + def render(self, dstbuf: memoryview, pos: int = 0) -> int: + """Write into bytearray""" + for frag in self.flist: + flen = len(frag) + start, pos = pos, pos + flen + dstbuf[start:pos] = frag + return pos + + def tobytes(self) -> bytes: + """Return as bytes""" + buf = memoryview(bytearray(self.size())) + self.render(buf) + return buf.tobytes() + + +class _SSHFormatRSA: + """Format for RSA keys. + + Public: + mpint e, n + Private: + mpint n, e, d, iqmp, p, q + """ + + def get_public( + self, data: memoryview + ) -> tuple[tuple[int, int], memoryview]: + """RSA public fields""" + e, data = _get_mpint(data) + n, data = _get_mpint(data) + return (e, n), data + + def load_public( + self, data: memoryview + ) -> tuple[rsa.RSAPublicKey, memoryview]: + """Make RSA public key from data.""" + (e, n), data = self.get_public(data) + public_numbers = rsa.RSAPublicNumbers(e, n) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[rsa.RSAPrivateKey, memoryview]: + """Make RSA private key from data.""" + n, data = _get_mpint(data) + e, data = _get_mpint(data) + d, data = _get_mpint(data) + iqmp, data = _get_mpint(data) + p, data = _get_mpint(data) + q, data = _get_mpint(data) + + if (e, n) != pubfields: + raise ValueError("Corrupt data: rsa field mismatch") + dmp1 = rsa.rsa_crt_dmp1(d, p) + dmq1 = rsa.rsa_crt_dmq1(d, q) + public_numbers = rsa.RSAPublicNumbers(e, n) + private_numbers = rsa.RSAPrivateNumbers( + p, q, d, dmp1, dmq1, iqmp, public_numbers + ) + private_key = private_numbers.private_key( + unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation + ) + return private_key, data + + def encode_public( + self, public_key: rsa.RSAPublicKey, f_pub: _FragList + ) -> None: + """Write RSA public key""" + pubn = public_key.public_numbers() + f_pub.put_mpint(pubn.e) + f_pub.put_mpint(pubn.n) + + def encode_private( + self, private_key: rsa.RSAPrivateKey, f_priv: _FragList + ) -> None: + """Write RSA private key""" + private_numbers = private_key.private_numbers() + public_numbers = private_numbers.public_numbers + + f_priv.put_mpint(public_numbers.n) + f_priv.put_mpint(public_numbers.e) + + f_priv.put_mpint(private_numbers.d) + f_priv.put_mpint(private_numbers.iqmp) + f_priv.put_mpint(private_numbers.p) + f_priv.put_mpint(private_numbers.q) + + +class _SSHFormatDSA: + """Format for DSA keys. 
+ + Public: + mpint p, q, g, y + Private: + mpint p, q, g, y, x + """ + + def get_public(self, data: memoryview) -> tuple[tuple, memoryview]: + """DSA public fields""" + p, data = _get_mpint(data) + q, data = _get_mpint(data) + g, data = _get_mpint(data) + y, data = _get_mpint(data) + return (p, q, g, y), data + + def load_public( + self, data: memoryview + ) -> tuple[dsa.DSAPublicKey, memoryview]: + """Make DSA public key from data.""" + (p, q, g, y), data = self.get_public(data) + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[dsa.DSAPrivateKey, memoryview]: + """Make DSA private key from data.""" + (p, q, g, y), data = self.get_public(data) + x, data = _get_mpint(data) + + if (p, q, g, y) != pubfields: + raise ValueError("Corrupt data: dsa field mismatch") + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + private_numbers = dsa.DSAPrivateNumbers(x, public_numbers) + private_key = private_numbers.private_key() + return private_key, data + + def encode_public( + self, public_key: dsa.DSAPublicKey, f_pub: _FragList + ) -> None: + """Write DSA public key""" + public_numbers = public_key.public_numbers() + parameter_numbers = public_numbers.parameter_numbers + self._validate(public_numbers) + + f_pub.put_mpint(parameter_numbers.p) + f_pub.put_mpint(parameter_numbers.q) + f_pub.put_mpint(parameter_numbers.g) + f_pub.put_mpint(public_numbers.y) + + def encode_private( + self, private_key: dsa.DSAPrivateKey, f_priv: _FragList + ) -> None: + """Write DSA private key""" + self.encode_public(private_key.public_key(), f_priv) + f_priv.put_mpint(private_key.private_numbers().x) + + def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None: + parameter_numbers = public_numbers.parameter_numbers + if parameter_numbers.p.bit_length() != 1024: + raise ValueError("SSH supports only 1024 bit DSA keys") + + +class _SSHFormatECDSA: + """Format for ECDSA keys. 
+ + Public: + str curve + bytes point + Private: + str curve + bytes point + mpint secret + """ + + def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve): + self.ssh_curve_name = ssh_curve_name + self.curve = curve + + def get_public( + self, data: memoryview + ) -> tuple[tuple[memoryview, memoryview], memoryview]: + """ECDSA public fields""" + curve, data = _get_sshstr(data) + point, data = _get_sshstr(data) + if curve != self.ssh_curve_name: + raise ValueError("Curve name mismatch") + if point[0] != 4: + raise NotImplementedError("Need uncompressed point") + return (curve, point), data + + def load_public( + self, data: memoryview + ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + (_, point), data = self.get_public(data) + public_key = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[ec.EllipticCurvePrivateKey, memoryview]: + """Make ECDSA private key from data.""" + (curve_name, point), data = self.get_public(data) + secret, data = _get_mpint(data) + + if (curve_name, point) != pubfields: + raise ValueError("Corrupt data: ecdsa field mismatch") + private_key = ec.derive_private_key(secret, self.curve) + return private_key, data + + def encode_public( + self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList + ) -> None: + """Write ECDSA public key""" + point = public_key.public_bytes( + Encoding.X962, PublicFormat.UncompressedPoint + ) + f_pub.put_sshstr(self.ssh_curve_name) + f_pub.put_sshstr(point) + + def encode_private( + self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList + ) -> None: + """Write ECDSA private key""" + public_key = private_key.public_key() + private_numbers = private_key.private_numbers() + + self.encode_public(public_key, f_priv) + f_priv.put_mpint(private_numbers.private_value) + + +class _SSHFormatEd25519: + """Format for Ed25519 keys. 
+ + Public: + bytes point + Private: + bytes point + bytes secret_and_point + """ + + def get_public( + self, data: memoryview + ) -> tuple[tuple[memoryview], memoryview]: + """Ed25519 public fields""" + point, data = _get_sshstr(data) + return (point,), data + + def load_public( + self, data: memoryview + ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + (point,), data = self.get_public(data) + public_key = ed25519.Ed25519PublicKey.from_public_bytes( + point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[ed25519.Ed25519PrivateKey, memoryview]: + """Make Ed25519 private key from data.""" + (point,), data = self.get_public(data) + keypair, data = _get_sshstr(data) + + secret = keypair[:32] + point2 = keypair[32:] + if point != point2 or (point,) != pubfields: + raise ValueError("Corrupt data: ed25519 field mismatch") + private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret) + return private_key, data + + def encode_public( + self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList + ) -> None: + """Write Ed25519 public key""" + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_pub.put_sshstr(raw_public_key) + + def encode_private( + self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList + ) -> None: + """Write Ed25519 private key""" + public_key = private_key.public_key() + raw_private_key = private_key.private_bytes( + Encoding.Raw, PrivateFormat.Raw, NoEncryption() + ) + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_keypair = _FragList([raw_private_key, raw_public_key]) + + self.encode_public(public_key, f_priv) + f_priv.put_sshstr(f_keypair) + + +def load_application(data) -> tuple[memoryview, memoryview]: + """ + U2F application strings + """ + application, data = _get_sshstr(data) + if not application.tobytes().startswith(b"ssh:"): + raise ValueError( + "U2F application string does not start with b'ssh:' " + f"({application})" + ) + return application, data + + +class _SSHFormatSKEd25519: + """ + The format of a sk-ssh-ed25519@openssh.com public key is: + + string "sk-ssh-ed25519@openssh.com" + string public key + string application (user-specified, but typically "ssh:") + """ + + def load_public( + self, data: memoryview + ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + public_key, data = _lookup_kformat(_SSH_ED25519).load_public(data) + _, data = load_application(data) + return public_key, data + + def get_public(self, data: memoryview) -> typing.NoReturn: + # Confusingly `get_public` is an entry point used by private key + # loading. + raise UnsupportedAlgorithm( + "sk-ssh-ed25519 private keys cannot be loaded" + ) + + +class _SSHFormatSKECDSA: + """ + The format of a sk-ecdsa-sha2-nistp256@openssh.com public key is: + + string "sk-ecdsa-sha2-nistp256@openssh.com" + string curve name + ec_point Q + string application (user-specified, but typically "ssh:") + """ + + def load_public( + self, data: memoryview + ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + public_key, data = _lookup_kformat(_ECDSA_NISTP256).load_public(data) + _, data = load_application(data) + return public_key, data + + def get_public(self, data: memoryview) -> typing.NoReturn: + # Confusingly `get_public` is an entry point used by private key + # loading. 
+ raise UnsupportedAlgorithm( + "sk-ecdsa-sha2-nistp256 private keys cannot be loaded" + ) + + +_KEY_FORMATS = { + _SSH_RSA: _SSHFormatRSA(), + _SSH_DSA: _SSHFormatDSA(), + _SSH_ED25519: _SSHFormatEd25519(), + _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()), + _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()), + _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()), + _SK_SSH_ED25519: _SSHFormatSKEd25519(), + _SK_SSH_ECDSA_NISTP256: _SSHFormatSKECDSA(), +} + + +def _lookup_kformat(key_type: utils.Buffer): + """Return valid format or throw error""" + if not isinstance(key_type, bytes): + key_type = memoryview(key_type).tobytes() + if key_type in _KEY_FORMATS: + return _KEY_FORMATS[key_type] + raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}") + + +SSHPrivateKeyTypes = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +def load_ssh_private_key( + data: utils.Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> SSHPrivateKeyTypes: + """Load private key from OpenSSH custom encoding.""" + utils._check_byteslike("data", data) + if password is not None: + utils._check_bytes("password", password) + + m = _PEM_RC.search(data) + if not m: + raise ValueError("Not OpenSSH private key format") + p1 = m.start(1) + p2 = m.end(1) + data = binascii.a2b_base64(memoryview(data)[p1:p2]) + if not data.startswith(_SK_MAGIC): + raise ValueError("Not OpenSSH private key format") + data = memoryview(data)[len(_SK_MAGIC) :] + + # parse header + ciphername, data = _get_sshstr(data) + kdfname, data = _get_sshstr(data) + kdfoptions, data = _get_sshstr(data) + nkeys, data = _get_u32(data) + if nkeys != 1: + raise ValueError("Only one key supported") + + # load public key data + pubdata, data = _get_sshstr(data) + pub_key_type, pubdata = _get_sshstr(pubdata) + kformat = _lookup_kformat(pub_key_type) + pubfields, pubdata = kformat.get_public(pubdata) + _check_empty(pubdata) + + if ciphername != _NONE or kdfname != _NONE: + ciphername_bytes = ciphername.tobytes() + if ciphername_bytes not in _SSH_CIPHERS: + raise UnsupportedAlgorithm( + f"Unsupported cipher: {ciphername_bytes!r}" + ) + if kdfname != _BCRYPT: + raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}") + blklen = _SSH_CIPHERS[ciphername_bytes].block_len + tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len + # load secret data + edata, data = _get_sshstr(data) + # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for + # information about how OpenSSH handles AEAD tags + if _SSH_CIPHERS[ciphername_bytes].is_aead: + tag = bytes(data) + if len(tag) != tag_len: + raise ValueError("Corrupt data: invalid tag length for cipher") + else: + _check_empty(data) + _check_block_size(edata, blklen) + salt, kbuf = _get_sshstr(kdfoptions) + rounds, kbuf = _get_u32(kbuf) + _check_empty(kbuf) + ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds) + dec = ciph.decryptor() + edata = memoryview(dec.update(edata)) + if _SSH_CIPHERS[ciphername_bytes].is_aead: + assert isinstance(dec, AEADDecryptionContext) + _check_empty(dec.finalize_with_tag(tag)) + else: + # _check_block_size requires data to be a full block so there + # should be no output from finalize + _check_empty(dec.finalize()) + else: + if password: + raise TypeError( + "Password was given but private key is not encrypted." 
+ ) + # load secret data + edata, data = _get_sshstr(data) + _check_empty(data) + blklen = 8 + _check_block_size(edata, blklen) + ck1, edata = _get_u32(edata) + ck2, edata = _get_u32(edata) + if ck1 != ck2: + raise ValueError("Corrupt data: broken checksum") + + # load per-key struct + key_type, edata = _get_sshstr(edata) + if key_type != pub_key_type: + raise ValueError("Corrupt data: key type mismatch") + private_key, edata = kformat.load_private( + edata, + pubfields, + unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation, + ) + # We don't use the comment + _, edata = _get_sshstr(edata) + + # yes, SSH does padding check *after* all other parsing is done. + # need to follow as it writes zero-byte padding too. + if edata != _PADDING[: len(edata)]: + raise ValueError("Corrupt data: invalid padding") + + if isinstance(private_key, dsa.DSAPrivateKey): + warnings.warn( + "SSH DSA keys are deprecated and will be removed in a future " + "release.", + utils.DeprecatedIn40, + stacklevel=2, + ) + + return private_key + + +def _serialize_ssh_private_key( + private_key: SSHPrivateKeyTypes, + password: bytes, + encryption_algorithm: KeySerializationEncryption, +) -> bytes: + """Serialize private key with OpenSSH custom encoding.""" + utils._check_bytes("password", password) + if isinstance(private_key, dsa.DSAPrivateKey): + warnings.warn( + "SSH DSA key support is deprecated and will be " + "removed in a future release", + utils.DeprecatedIn40, + stacklevel=4, + ) + + key_type = _get_ssh_key_type(private_key) + kformat = _lookup_kformat(key_type) + + # setup parameters + f_kdfoptions = _FragList() + if password: + ciphername = _DEFAULT_CIPHER + blklen = _SSH_CIPHERS[ciphername].block_len + kdfname = _BCRYPT + rounds = _DEFAULT_ROUNDS + if ( + isinstance(encryption_algorithm, _KeySerializationEncryption) + and encryption_algorithm._kdf_rounds is not None + ): + rounds = encryption_algorithm._kdf_rounds + salt = os.urandom(16) + f_kdfoptions.put_sshstr(salt) + f_kdfoptions.put_u32(rounds) + ciph = _init_cipher(ciphername, password, salt, rounds) + else: + ciphername = kdfname = _NONE + blklen = 8 + ciph = None + nkeys = 1 + checkval = os.urandom(4) + comment = b"" + + # encode public and private parts together + f_public_key = _FragList() + f_public_key.put_sshstr(key_type) + kformat.encode_public(private_key.public_key(), f_public_key) + + f_secrets = _FragList([checkval, checkval]) + f_secrets.put_sshstr(key_type) + kformat.encode_private(private_key, f_secrets) + f_secrets.put_sshstr(comment) + f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)]) + + # top-level structure + f_main = _FragList() + f_main.put_raw(_SK_MAGIC) + f_main.put_sshstr(ciphername) + f_main.put_sshstr(kdfname) + f_main.put_sshstr(f_kdfoptions) + f_main.put_u32(nkeys) + f_main.put_sshstr(f_public_key) + f_main.put_sshstr(f_secrets) + + # copy result info bytearray + slen = f_secrets.size() + mlen = f_main.size() + buf = memoryview(bytearray(mlen + blklen)) + f_main.render(buf) + ofs = mlen - slen + + # encrypt in-place + if ciph is not None: + ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:]) + + return _ssh_pem_encode(buf[:mlen]) + + +SSHPublicKeyTypes = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + dsa.DSAPublicKey, + ed25519.Ed25519PublicKey, +] + +SSHCertPublicKeyTypes = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + ed25519.Ed25519PublicKey, +] + + +class SSHCertificateType(enum.Enum): + USER = 1 + HOST = 2 + + +class SSHCertificate: + def __init__( + self, + 
_nonce: memoryview, + _public_key: SSHPublicKeyTypes, + _serial: int, + _cctype: int, + _key_id: memoryview, + _valid_principals: list[bytes], + _valid_after: int, + _valid_before: int, + _critical_options: dict[bytes, bytes], + _extensions: dict[bytes, bytes], + _sig_type: memoryview, + _sig_key: memoryview, + _inner_sig_type: memoryview, + _signature: memoryview, + _tbs_cert_body: memoryview, + _cert_key_type: bytes, + _cert_body: memoryview, + ): + self._nonce = _nonce + self._public_key = _public_key + self._serial = _serial + try: + self._type = SSHCertificateType(_cctype) + except ValueError: + raise ValueError("Invalid certificate type") + self._key_id = _key_id + self._valid_principals = _valid_principals + self._valid_after = _valid_after + self._valid_before = _valid_before + self._critical_options = _critical_options + self._extensions = _extensions + self._sig_type = _sig_type + self._sig_key = _sig_key + self._inner_sig_type = _inner_sig_type + self._signature = _signature + self._cert_key_type = _cert_key_type + self._cert_body = _cert_body + self._tbs_cert_body = _tbs_cert_body + + @property + def nonce(self) -> bytes: + return bytes(self._nonce) + + def public_key(self) -> SSHCertPublicKeyTypes: + # make mypy happy until we remove DSA support entirely and + # the underlying union won't have a disallowed type + return typing.cast(SSHCertPublicKeyTypes, self._public_key) + + @property + def serial(self) -> int: + return self._serial + + @property + def type(self) -> SSHCertificateType: + return self._type + + @property + def key_id(self) -> bytes: + return bytes(self._key_id) + + @property + def valid_principals(self) -> list[bytes]: + return self._valid_principals + + @property + def valid_before(self) -> int: + return self._valid_before + + @property + def valid_after(self) -> int: + return self._valid_after + + @property + def critical_options(self) -> dict[bytes, bytes]: + return self._critical_options + + @property + def extensions(self) -> dict[bytes, bytes]: + return self._extensions + + def signature_key(self) -> SSHCertPublicKeyTypes: + sigformat = _lookup_kformat(self._sig_type) + signature_key, sigkey_rest = sigformat.load_public(self._sig_key) + _check_empty(sigkey_rest) + return signature_key + + def public_bytes(self) -> bytes: + return ( + bytes(self._cert_key_type) + + b" " + + binascii.b2a_base64(bytes(self._cert_body), newline=False) + ) + + def verify_cert_signature(self) -> None: + signature_key = self.signature_key() + if isinstance(signature_key, ed25519.Ed25519PublicKey): + signature_key.verify( + bytes(self._signature), bytes(self._tbs_cert_body) + ) + elif isinstance(signature_key, ec.EllipticCurvePublicKey): + # The signature is encoded as a pair of big-endian integers + r, data = _get_mpint(self._signature) + s, data = _get_mpint(data) + _check_empty(data) + computed_sig = asym_utils.encode_dss_signature(r, s) + hash_alg = _get_ec_hash_alg(signature_key.curve) + signature_key.verify( + computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg) + ) + else: + assert isinstance(signature_key, rsa.RSAPublicKey) + if self._inner_sig_type == _SSH_RSA: + hash_alg = hashes.SHA1() + elif self._inner_sig_type == _SSH_RSA_SHA256: + hash_alg = hashes.SHA256() + else: + assert self._inner_sig_type == _SSH_RSA_SHA512 + hash_alg = hashes.SHA512() + signature_key.verify( + bytes(self._signature), + bytes(self._tbs_cert_body), + padding.PKCS1v15(), + hash_alg, + ) + + +def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm: + if isinstance(curve, 
ec.SECP256R1): + return hashes.SHA256() + elif isinstance(curve, ec.SECP384R1): + return hashes.SHA384() + else: + assert isinstance(curve, ec.SECP521R1) + return hashes.SHA512() + + +def _load_ssh_public_identity( + data: utils.Buffer, + _legacy_dsa_allowed=False, +) -> SSHCertificate | SSHPublicKeyTypes: + utils._check_byteslike("data", data) + + m = _SSH_PUBKEY_RC.match(data) + if not m: + raise ValueError("Invalid line format") + key_type = orig_key_type = m.group(1) + key_body = m.group(2) + with_cert = False + if key_type.endswith(_CERT_SUFFIX): + with_cert = True + key_type = key_type[: -len(_CERT_SUFFIX)] + if key_type == _SSH_DSA and not _legacy_dsa_allowed: + raise UnsupportedAlgorithm( + "DSA keys aren't supported in SSH certificates" + ) + kformat = _lookup_kformat(key_type) + + try: + rest = memoryview(binascii.a2b_base64(key_body)) + except (TypeError, binascii.Error): + raise ValueError("Invalid format") + + if with_cert: + cert_body = rest + inner_key_type, rest = _get_sshstr(rest) + if inner_key_type != orig_key_type: + raise ValueError("Invalid key format") + if with_cert: + nonce, rest = _get_sshstr(rest) + public_key, rest = kformat.load_public(rest) + if with_cert: + serial, rest = _get_u64(rest) + cctype, rest = _get_u32(rest) + key_id, rest = _get_sshstr(rest) + principals, rest = _get_sshstr(rest) + valid_principals = [] + while principals: + principal, principals = _get_sshstr(principals) + valid_principals.append(bytes(principal)) + valid_after, rest = _get_u64(rest) + valid_before, rest = _get_u64(rest) + crit_options, rest = _get_sshstr(rest) + critical_options = _parse_exts_opts(crit_options) + exts, rest = _get_sshstr(rest) + extensions = _parse_exts_opts(exts) + # Get the reserved field, which is unused. + _, rest = _get_sshstr(rest) + sig_key_raw, rest = _get_sshstr(rest) + sig_type, sig_key = _get_sshstr(sig_key_raw) + if sig_type == _SSH_DSA and not _legacy_dsa_allowed: + raise UnsupportedAlgorithm( + "DSA signatures aren't supported in SSH certificates" + ) + # Get the entire cert body and subtract the signature + tbs_cert_body = cert_body[: -len(rest)] + signature_raw, rest = _get_sshstr(rest) + _check_empty(rest) + inner_sig_type, sig_rest = _get_sshstr(signature_raw) + # RSA certs can have multiple algorithm types + if ( + sig_type == _SSH_RSA + and inner_sig_type + not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA] + ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type): + raise ValueError("Signature key type does not match") + signature, sig_rest = _get_sshstr(sig_rest) + _check_empty(sig_rest) + return SSHCertificate( + nonce, + public_key, + serial, + cctype, + key_id, + valid_principals, + valid_after, + valid_before, + critical_options, + extensions, + sig_type, + sig_key, + inner_sig_type, + signature, + tbs_cert_body, + orig_key_type, + cert_body, + ) + else: + _check_empty(rest) + return public_key + + +def load_ssh_public_identity( + data: utils.Buffer, +) -> SSHCertificate | SSHPublicKeyTypes: + return _load_ssh_public_identity(data) + + +def _parse_exts_opts(exts_opts: memoryview) -> dict[bytes, bytes]: + result: dict[bytes, bytes] = {} + last_name = None + while exts_opts: + name, exts_opts = _get_sshstr(exts_opts) + bname: bytes = bytes(name) + if bname in result: + raise ValueError("Duplicate name") + if last_name is not None and bname < last_name: + raise ValueError("Fields not lexically sorted") + value, exts_opts = _get_sshstr(exts_opts) + if len(value) > 0: + value, extra = _get_sshstr(value) + if len(extra) > 0: + raise 
ValueError("Unexpected extra data after value") + result[bname] = bytes(value) + last_name = bname + return result + + +def ssh_key_fingerprint( + key: SSHPublicKeyTypes, + hash_algorithm: hashes.MD5 | hashes.SHA256, +) -> bytes: + if not isinstance(hash_algorithm, (hashes.MD5, hashes.SHA256)): + raise TypeError("hash_algorithm must be either MD5 or SHA256") + + key_type = _get_ssh_key_type(key) + kformat = _lookup_kformat(key_type) + + f_pub = _FragList() + f_pub.put_sshstr(key_type) + kformat.encode_public(key, f_pub) + + ssh_binary_data = f_pub.tobytes() + + # Hash the binary data + hash_obj = hashes.Hash(hash_algorithm) + hash_obj.update(ssh_binary_data) + return hash_obj.finalize() + + +def load_ssh_public_key( + data: utils.Buffer, backend: typing.Any = None +) -> SSHPublicKeyTypes: + cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True) + public_key: SSHPublicKeyTypes + if isinstance(cert_or_key, SSHCertificate): + public_key = cert_or_key.public_key() + else: + public_key = cert_or_key + + if isinstance(public_key, dsa.DSAPublicKey): + warnings.warn( + "SSH DSA keys are deprecated and will be removed in a future " + "release.", + utils.DeprecatedIn40, + stacklevel=2, + ) + return public_key + + +def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes: + """One-line public key format for OpenSSH""" + if isinstance(public_key, dsa.DSAPublicKey): + warnings.warn( + "SSH DSA key support is deprecated and will be " + "removed in a future release", + utils.DeprecatedIn40, + stacklevel=4, + ) + key_type = _get_ssh_key_type(public_key) + kformat = _lookup_kformat(key_type) + + f_pub = _FragList() + f_pub.put_sshstr(key_type) + kformat.encode_public(public_key, f_pub) + + pub = binascii.b2a_base64(f_pub.tobytes()).strip() + return b"".join([key_type, b" ", pub]) + + +SSHCertPrivateKeyTypes = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +# This is an undocumented limit enforced in the openssh codebase for sshd and +# ssh-keygen, but it is undefined in the ssh certificates spec. 
+_SSHKEY_CERT_MAX_PRINCIPALS = 256 + + +class SSHCertificateBuilder: + def __init__( + self, + _public_key: SSHCertPublicKeyTypes | None = None, + _serial: int | None = None, + _type: SSHCertificateType | None = None, + _key_id: bytes | None = None, + _valid_principals: list[bytes] = [], + _valid_for_all_principals: bool = False, + _valid_before: int | None = None, + _valid_after: int | None = None, + _critical_options: list[tuple[bytes, bytes]] = [], + _extensions: list[tuple[bytes, bytes]] = [], + ): + self._public_key = _public_key + self._serial = _serial + self._type = _type + self._key_id = _key_id + self._valid_principals = _valid_principals + self._valid_for_all_principals = _valid_for_all_principals + self._valid_before = _valid_before + self._valid_after = _valid_after + self._critical_options = _critical_options + self._extensions = _extensions + + def public_key( + self, public_key: SSHCertPublicKeyTypes + ) -> SSHCertificateBuilder: + if not isinstance( + public_key, + ( + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + ed25519.Ed25519PublicKey, + ), + ): + raise TypeError("Unsupported key type") + if self._public_key is not None: + raise ValueError("public_key already set") + + return SSHCertificateBuilder( + _public_key=public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def serial(self, serial: int) -> SSHCertificateBuilder: + if not isinstance(serial, int): + raise TypeError("serial must be an integer") + if not 0 <= serial < 2**64: + raise ValueError("serial must be between 0 and 2**64") + if self._serial is not None: + raise ValueError("serial already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def type(self, type: SSHCertificateType) -> SSHCertificateBuilder: + if not isinstance(type, SSHCertificateType): + raise TypeError("type must be an SSHCertificateType") + if self._type is not None: + raise ValueError("type already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def key_id(self, key_id: bytes) -> SSHCertificateBuilder: + if not isinstance(key_id, bytes): + raise TypeError("key_id must be bytes") + if self._key_id is not None: + raise ValueError("key_id already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_principals( + self, valid_principals: list[bytes] 
+ ) -> SSHCertificateBuilder: + if self._valid_for_all_principals: + raise ValueError( + "Principals can't be set because the cert is valid " + "for all principals" + ) + if ( + not all(isinstance(x, bytes) for x in valid_principals) + or not valid_principals + ): + raise TypeError( + "principals must be a list of bytes and can't be empty" + ) + if self._valid_principals: + raise ValueError("valid_principals already set") + + if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS: + raise ValueError( + "Reached or exceeded the maximum number of valid_principals" + ) + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_for_all_principals(self): + if self._valid_principals: + raise ValueError( + "valid_principals already set, can't set " + "valid_for_all_principals" + ) + if self._valid_for_all_principals: + raise ValueError("valid_for_all_principals already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=True, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_before(self, valid_before: int | float) -> SSHCertificateBuilder: + if not isinstance(valid_before, (int, float)): + raise TypeError("valid_before must be an int or float") + valid_before = int(valid_before) + if valid_before < 0 or valid_before >= 2**64: + raise ValueError("valid_before must [0, 2**64)") + if self._valid_before is not None: + raise ValueError("valid_before already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_after(self, valid_after: int | float) -> SSHCertificateBuilder: + if not isinstance(valid_after, (int, float)): + raise TypeError("valid_after must be an int or float") + valid_after = int(valid_after) + if valid_after < 0 or valid_after >= 2**64: + raise ValueError("valid_after must [0, 2**64)") + if self._valid_after is not None: + raise ValueError("valid_after already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def add_critical_option( + self, name: bytes, value: bytes + ) -> SSHCertificateBuilder: + if not isinstance(name, bytes) or not isinstance(value, bytes): + raise TypeError("name and value must be bytes") + # This is O(n**2) + if name in [name for name, _ in self._critical_options]: + raise ValueError("Duplicate critical option name") + + return SSHCertificateBuilder( + _public_key=self._public_key, + 
_serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=[*self._critical_options, (name, value)], + _extensions=self._extensions, + ) + + def add_extension( + self, name: bytes, value: bytes + ) -> SSHCertificateBuilder: + if not isinstance(name, bytes) or not isinstance(value, bytes): + raise TypeError("name and value must be bytes") + # This is O(n**2) + if name in [name for name, _ in self._extensions]: + raise ValueError("Duplicate extension name") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=[*self._extensions, (name, value)], + ) + + def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate: + if not isinstance( + private_key, + ( + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + ed25519.Ed25519PrivateKey, + ), + ): + raise TypeError("Unsupported private key type") + + if self._public_key is None: + raise ValueError("public_key must be set") + + # Not required + serial = 0 if self._serial is None else self._serial + + if self._type is None: + raise ValueError("type must be set") + + # Not required + key_id = b"" if self._key_id is None else self._key_id + + # A zero length list is valid, but means the certificate + # is valid for any principal of the specified type. We require + # the user to explicitly set valid_for_all_principals to get + # that behavior. 
+ if not self._valid_principals and not self._valid_for_all_principals: + raise ValueError( + "valid_principals must be set if valid_for_all_principals " + "is False" + ) + + if self._valid_before is None: + raise ValueError("valid_before must be set") + + if self._valid_after is None: + raise ValueError("valid_after must be set") + + if self._valid_after > self._valid_before: + raise ValueError("valid_after must be earlier than valid_before") + + # lexically sort our byte strings + self._critical_options.sort(key=lambda x: x[0]) + self._extensions.sort(key=lambda x: x[0]) + + key_type = _get_ssh_key_type(self._public_key) + cert_prefix = key_type + _CERT_SUFFIX + + # Marshal the bytes to be signed + nonce = os.urandom(32) + kformat = _lookup_kformat(key_type) + f = _FragList() + f.put_sshstr(cert_prefix) + f.put_sshstr(nonce) + kformat.encode_public(self._public_key, f) + f.put_u64(serial) + f.put_u32(self._type.value) + f.put_sshstr(key_id) + fprincipals = _FragList() + for p in self._valid_principals: + fprincipals.put_sshstr(p) + f.put_sshstr(fprincipals.tobytes()) + f.put_u64(self._valid_after) + f.put_u64(self._valid_before) + fcrit = _FragList() + for name, value in self._critical_options: + fcrit.put_sshstr(name) + if len(value) > 0: + foptval = _FragList() + foptval.put_sshstr(value) + fcrit.put_sshstr(foptval.tobytes()) + else: + fcrit.put_sshstr(value) + f.put_sshstr(fcrit.tobytes()) + fext = _FragList() + for name, value in self._extensions: + fext.put_sshstr(name) + if len(value) > 0: + fextval = _FragList() + fextval.put_sshstr(value) + fext.put_sshstr(fextval.tobytes()) + else: + fext.put_sshstr(value) + f.put_sshstr(fext.tobytes()) + f.put_sshstr(b"") # RESERVED FIELD + # encode CA public key + ca_type = _get_ssh_key_type(private_key) + caformat = _lookup_kformat(ca_type) + caf = _FragList() + caf.put_sshstr(ca_type) + caformat.encode_public(private_key.public_key(), caf) + f.put_sshstr(caf.tobytes()) + # Sigs according to the rules defined for the CA's public key + # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA, + # and RFC8032 for Ed25519). + if isinstance(private_key, ed25519.Ed25519PrivateKey): + signature = private_key.sign(f.tobytes()) + fsig = _FragList() + fsig.put_sshstr(ca_type) + fsig.put_sshstr(signature) + f.put_sshstr(fsig.tobytes()) + elif isinstance(private_key, ec.EllipticCurvePrivateKey): + hash_alg = _get_ec_hash_alg(private_key.curve) + signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg)) + r, s = asym_utils.decode_dss_signature(signature) + fsig = _FragList() + fsig.put_sshstr(ca_type) + fsigblob = _FragList() + fsigblob.put_mpint(r) + fsigblob.put_mpint(s) + fsig.put_sshstr(fsigblob.tobytes()) + f.put_sshstr(fsig.tobytes()) + + else: + assert isinstance(private_key, rsa.RSAPrivateKey) + # Just like Golang, we're going to use SHA512 for RSA + # https://cs.opensource.google/go/x/crypto/+/refs/tags/ + # v0.4.0:ssh/certs.go;l=445 + # RFC 8332 defines SHA256 and 512 as options + fsig = _FragList() + fsig.put_sshstr(_SSH_RSA_SHA512) + signature = private_key.sign( + f.tobytes(), padding.PKCS1v15(), hashes.SHA512() + ) + fsig.put_sshstr(signature) + f.put_sshstr(fsig.tobytes()) + + cert_data = binascii.b2a_base64(f.tobytes()).strip() + # load_ssh_public_identity returns a union, but this is + # guaranteed to be an SSHCertificate, so we cast to make + # mypy happy. 
+ return typing.cast( + SSHCertificate, + load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])), + ) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..c1af423 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + + +class InvalidToken(Exception): + pass diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..bb519cb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-39.pyc new file mode 100644 index 0000000..2fb5265 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-39.pyc new file mode 100644 index 0000000..f7e58e8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 0000000..21fb000 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,101 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
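The hotp.py module added below implements HOTP (RFC 4226). A minimal usage sketch under stated assumptions: the 20-byte random key, the 6-digit length, and the counter value are illustrative placeholders, not taken from the vendored file.

    import os

    from cryptography.hazmat.primitives.hashes import SHA1
    from cryptography.hazmat.primitives.twofactor.hotp import HOTP

    key = os.urandom(20)          # HOTP.__init__ requires a key of at least 128 bits
    hotp = HOTP(key, 6, SHA1())   # 6-digit codes, SHA-1 as in RFC 4226
    token = hotp.generate(0)      # bytes such as b"123456" for counter value 0
    hotp.verify(token, 0)         # raises InvalidToken if the value does not match

The key and the counter state must be shared with the verifier; only the counter advances between generated codes.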
+ +from __future__ import annotations + +import base64 +import typing +from urllib.parse import quote, urlencode + +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.utils import Buffer + +HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512] + + +def _generate_uri( + hotp: HOTP, + type_name: str, + account_name: str, + issuer: str | None, + extra_parameters: list[tuple[str, int]], +) -> str: + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + label = ( + f"{quote(issuer)}:{quote(account_name)}" + if issuer + else quote(account_name) + ) + return f"otpauth://{type_name}/{label}?{urlencode(parameters)}" + + +class HOTP: + def __init__( + self, + key: Buffer, + length: int, + algorithm: HOTPHashTypes, + backend: typing.Any = None, + enforce_key_length: bool = True, + ) -> None: + if len(key) < 16 and enforce_key_length is True: + raise ValueError("Key length has to be at least 128 bits.") + + if not isinstance(length, int): + raise TypeError("Length parameter must be an integer type.") + + if length < 6 or length > 8: + raise ValueError("Length of HOTP has to be between 6 and 8.") + + if not isinstance(algorithm, (SHA1, SHA256, SHA512)): + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") + + self._key = key + self._length = length + self._algorithm = algorithm + + def generate(self, counter: int) -> bytes: + if not isinstance(counter, int): + raise TypeError("Counter parameter must be an integer type.") + + truncated_value = self._dynamic_truncate(counter) + hotp = truncated_value % (10**self._length) + return "{0:0{1}}".format(hotp, self._length).encode() + + def verify(self, hotp: bytes, counter: int) -> None: + if not constant_time.bytes_eq(self.generate(counter), hotp): + raise InvalidToken("Supplied HOTP value does not match.") + + def _dynamic_truncate(self, counter: int) -> int: + ctx = hmac.HMAC(self._key, self._algorithm) + + try: + ctx.update(counter.to_bytes(length=8, byteorder="big")) + except OverflowError: + raise ValueError(f"Counter must be between 0 and {2**64 - 1}.") + + hmac_value = ctx.finalize() + + offset = hmac_value[len(hmac_value) - 1] & 0b1111 + p = hmac_value[offset : offset + 4] + return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF + + def get_provisioning_uri( + self, account_name: str, counter: int, issuer: str | None + ) -> str: + return _generate_uri( + self, "hotp", account_name, issuer, [("counter", int(counter))] + ) diff --git a/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/totp.py new file mode 100644 index 0000000..10c725c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1,56 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
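The totp.py module added below derives the HOTP counter from wall-clock time. A short usage sketch; the key, the 30-second time step, the account name, and the issuer string are illustrative placeholders.

    import os
    import time

    from cryptography.hazmat.primitives.hashes import SHA1
    from cryptography.hazmat.primitives.twofactor.totp import TOTP

    key = os.urandom(20)
    totp = TOTP(key, 6, SHA1(), 30)    # 6 digits, 30-second time step
    now = int(time.time())
    token = totp.generate(now)
    totp.verify(token, now)            # raises InvalidToken on mismatch
    print(totp.get_provisioning_uri("user@example.com", "Example Issuer"))

Because the counter is the timestamp divided by the time step, prover and verifier only need synchronized clocks rather than a shared counter.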
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.hotp import ( + HOTP, + HOTPHashTypes, + _generate_uri, +) +from cryptography.utils import Buffer + + +class TOTP: + def __init__( + self, + key: Buffer, + length: int, + algorithm: HOTPHashTypes, + time_step: int, + backend: typing.Any = None, + enforce_key_length: bool = True, + ): + self._time_step = time_step + self._hotp = HOTP( + key, length, algorithm, enforce_key_length=enforce_key_length + ) + + def generate(self, time: int | float) -> bytes: + if not isinstance(time, (int, float)): + raise TypeError( + "Time parameter must be an integer type or float type." + ) + + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp: bytes, time: int) -> None: + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri( + self, account_name: str, issuer: str | None + ) -> str: + return _generate_uri( + self._hotp, + "totp", + account_name, + issuer, + [("period", int(self._time_step))], + ) diff --git a/.venv/lib/python3.9/site-packages/cryptography/py.typed b/.venv/lib/python3.9/site-packages/cryptography/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/cryptography/utils.py b/.venv/lib/python3.9/site-packages/cryptography/utils.py new file mode 100644 index 0000000..3a930fd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/utils.py @@ -0,0 +1,138 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import enum +import sys +import types +import typing +import warnings +from collections.abc import Callable, Sequence + + +# We use a UserWarning subclass, instead of DeprecationWarning, because CPython +# decided deprecation warnings should be invisible by default. +class CryptographyDeprecationWarning(UserWarning): + pass + + +# Several APIs were deprecated with no specific end-of-life date because of the +# ubiquity of their use. They should not be removed until we agree on when that +# cycle ends. +DeprecatedIn36 = CryptographyDeprecationWarning +DeprecatedIn40 = CryptographyDeprecationWarning +DeprecatedIn41 = CryptographyDeprecationWarning +DeprecatedIn42 = CryptographyDeprecationWarning +DeprecatedIn43 = CryptographyDeprecationWarning +DeprecatedIn46 = CryptographyDeprecationWarning + + +# If you're wondering why we don't use `Buffer`, it's because `Buffer` would +# be more accurately named: Bufferable. It means something which has an +# `__buffer__`. Which means you can't actually treat the result as a buffer +# (and do things like take a `len()`). 
+if sys.version_info >= (3, 9): + Buffer = typing.Union[bytes, bytearray, memoryview] +else: + Buffer = typing.ByteString + + +def _check_bytes(name: str, value: bytes) -> None: + if not isinstance(value, bytes): + raise TypeError(f"{name} must be bytes") + + +def _check_byteslike(name: str, value: Buffer) -> None: + try: + memoryview(value) + except TypeError: + raise TypeError(f"{name} must be bytes-like") + + +def int_to_bytes(integer: int, length: int | None = None) -> bytes: + if length == 0: + raise ValueError("length argument can't be 0") + return integer.to_bytes( + length or (integer.bit_length() + 7) // 8 or 1, "big" + ) + + +class InterfaceNotImplemented(Exception): + pass + + +class _DeprecatedValue: + def __init__(self, value: object, message: str, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(types.ModuleType): + def __init__(self, module: types.ModuleType): + super().__init__(module.__name__) + self.__dict__["_module"] = module + + def __getattr__(self, attr: str) -> object: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr: str, value: object) -> None: + setattr(self._module, attr, value) + + def __delattr__(self, attr: str) -> None: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + + delattr(self._module, attr) + + def __dir__(self) -> Sequence[str]: + return ["_module", *dir(self._module)] + + +def deprecated( + value: object, + module_name: str, + message: str, + warning_class: type[Warning], + name: str | None = None, +) -> _DeprecatedValue: + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = module = _ModuleWithDeprecations(module) + dv = _DeprecatedValue(value, message, warning_class) + # Maintain backwards compatibility with `name is None` for pyOpenSSL. + if name is not None: + setattr(module, name, dv) + return dv + + +def cached_property(func: Callable) -> property: + cached_name = f"_cached_{func}" + sentinel = object() + + def inner(instance: object): + cache = getattr(instance, cached_name, sentinel) + if cache is not sentinel: + return cache + result = func(instance) + setattr(instance, cached_name, result) + return result + + return property(inner) + + +# Python 3.10 changed representation of enums. We use well-defined object +# representation and string representation from Python 3.9. +class Enum(enum.Enum): + def __repr__(self) -> str: + return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>" + + def __str__(self) -> str: + return f"{self.__class__.__name__}.{self._name_}" diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__init__.py b/.venv/lib/python3.9/site-packages/cryptography/x509/__init__.py new file mode 100644 index 0000000..318eecc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/x509/__init__.py @@ -0,0 +1,270 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
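The x509 package __init__ added below re-exports the public X.509 API, including the PEM and DER loaders. A small loading sketch, assuming a PEM-encoded certificate at an illustrative path:

    from cryptography import x509
    from cryptography.x509.oid import NameOID

    with open("server.pem", "rb") as f:        # illustrative path
        cert = x509.load_pem_x509_certificate(f.read())

    print(cert.serial_number)
    print(cert.not_valid_after_utc)
    names = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)
    print(names[0].value if names else "(no common name)")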
+ +from __future__ import annotations + +from cryptography.x509 import certificate_transparency, verification +from cryptography.x509.base import ( + Attribute, + AttributeNotFound, + Attributes, + Certificate, + CertificateBuilder, + CertificateRevocationList, + CertificateRevocationListBuilder, + CertificateSigningRequest, + CertificateSigningRequestBuilder, + InvalidVersion, + RevokedCertificate, + RevokedCertificateBuilder, + Version, + load_der_x509_certificate, + load_der_x509_crl, + load_der_x509_csr, + load_pem_x509_certificate, + load_pem_x509_certificates, + load_pem_x509_crl, + load_pem_x509_csr, + random_serial_number, +) +from cryptography.x509.extensions import ( + AccessDescription, + Admission, + Admissions, + AuthorityInformationAccess, + AuthorityKeyIdentifier, + BasicConstraints, + CertificateIssuer, + CertificatePolicies, + CRLDistributionPoints, + CRLNumber, + CRLReason, + DeltaCRLIndicator, + DistributionPoint, + DuplicateExtension, + ExtendedKeyUsage, + Extension, + ExtensionNotFound, + Extensions, + ExtensionType, + FreshestCRL, + GeneralNames, + InhibitAnyPolicy, + InvalidityDate, + IssuerAlternativeName, + IssuingDistributionPoint, + KeyUsage, + MSCertificateTemplate, + NameConstraints, + NamingAuthority, + NoticeReference, + OCSPAcceptableResponses, + OCSPNoCheck, + OCSPNonce, + PolicyConstraints, + PolicyInformation, + PrecertificateSignedCertificateTimestamps, + PrecertPoison, + PrivateKeyUsagePeriod, + ProfessionInfo, + ReasonFlags, + SignedCertificateTimestamps, + SubjectAlternativeName, + SubjectInformationAccess, + SubjectKeyIdentifier, + TLSFeature, + TLSFeatureType, + UnrecognizedExtension, + UserNotice, +) +from cryptography.x509.general_name import ( + DirectoryName, + DNSName, + GeneralName, + IPAddress, + OtherName, + RegisteredID, + RFC822Name, + UniformResourceIdentifier, + UnsupportedGeneralNameType, +) +from cryptography.x509.name import ( + Name, + NameAttribute, + RelativeDistinguishedName, +) +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, + CertificatePoliciesOID, + CRLEntryExtensionOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + PublicKeyAlgorithmOID, + SignatureAlgorithmOID, +) + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_PRIVATE_KEY_USAGE_PERIOD = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = 
SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 +OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 +OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + +__all__ = [ + "OID_CA_ISSUERS", + "OID_OCSP", + "AccessDescription", + "Admission", + "Admissions", + "Attribute", + "AttributeNotFound", + "Attributes", + "AuthorityInformationAccess", + "AuthorityKeyIdentifier", + "BasicConstraints", + "CRLDistributionPoints", + "CRLNumber", + "CRLReason", + "Certificate", + "CertificateBuilder", + "CertificateIssuer", + "CertificatePolicies", + "CertificateRevocationList", + "CertificateRevocationListBuilder", + "CertificateSigningRequest", + "CertificateSigningRequestBuilder", + "DNSName", + "DeltaCRLIndicator", + "DirectoryName", + "DistributionPoint", + "DuplicateExtension", + "ExtendedKeyUsage", + "Extension", + "ExtensionNotFound", + "ExtensionType", + "Extensions", + "FreshestCRL", + "GeneralName", + "GeneralNames", + "IPAddress", + "InhibitAnyPolicy", + "InvalidVersion", + "InvalidityDate", + "IssuerAlternativeName", + "IssuingDistributionPoint", + "KeyUsage", + "MSCertificateTemplate", + "Name", + "NameAttribute", + "NameConstraints", + "NameOID", + "NamingAuthority", + "NoticeReference", + "OCSPAcceptableResponses", + "OCSPNoCheck", + "OCSPNonce", + "ObjectIdentifier", + "OtherName", + "PolicyConstraints", + "PolicyInformation", + "PrecertPoison", + "PrecertificateSignedCertificateTimestamps", + "PrivateKeyUsagePeriod", + "ProfessionInfo", + "PublicKeyAlgorithmOID", + 
"RFC822Name", + "ReasonFlags", + "RegisteredID", + "RelativeDistinguishedName", + "RevokedCertificate", + "RevokedCertificateBuilder", + "SignatureAlgorithmOID", + "SignedCertificateTimestamps", + "SubjectAlternativeName", + "SubjectInformationAccess", + "SubjectKeyIdentifier", + "TLSFeature", + "TLSFeatureType", + "UniformResourceIdentifier", + "UnrecognizedExtension", + "UnsupportedGeneralNameType", + "UserNotice", + "Version", + "certificate_transparency", + "load_der_x509_certificate", + "load_der_x509_crl", + "load_der_x509_csr", + "load_pem_x509_certificate", + "load_pem_x509_certificates", + "load_pem_x509_crl", + "load_pem_x509_csr", + "random_serial_number", + "verification", + "verification", +] diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..27afbc9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..333f290 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-39.pyc new file mode 100644 index 0000000..16691fc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/extensions.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/extensions.cpython-39.pyc new file mode 100644 index 0000000..f03bf49 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/extensions.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/general_name.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/general_name.cpython-39.pyc new file mode 100644 index 0000000..cdbcd95 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/general_name.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/name.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/name.cpython-39.pyc new file mode 100644 index 0000000..b47bfbd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/name.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/ocsp.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/ocsp.cpython-39.pyc new file mode 100644 index 0000000..eae0eb6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/ocsp.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/oid.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/oid.cpython-39.pyc new file mode 100644 index 0000000..eddd1d1 Binary files /dev/null and 
b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/oid.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/verification.cpython-39.pyc b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/verification.cpython-39.pyc new file mode 100644 index 0000000..5873b7f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/cryptography/x509/__pycache__/verification.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/base.py b/.venv/lib/python3.9/site-packages/cryptography/x509/base.py new file mode 100644 index 0000000..1be612b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/x509/base.py @@ -0,0 +1,848 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import datetime +import os +import typing +import warnings +from collections.abc import Iterable + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + padding, + rsa, + x448, + x25519, +) +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPrivateKeyTypes, + CertificatePublicKeyTypes, +) +from cryptography.x509.extensions import ( + Extension, + Extensions, + ExtensionType, + _make_sequence_methods, +) +from cryptography.x509.name import Name, _ASN1Type +from cryptography.x509.oid import ObjectIdentifier + +_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) + +# This must be kept in sync with sign.rs's list of allowable types in +# identify_hash_type +_AllowedHashTypes = typing.Union[ + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + hashes.SHA3_224, + hashes.SHA3_256, + hashes.SHA3_384, + hashes.SHA3_512, +] + + +class AttributeNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +def _reject_duplicate_extension( + extension: Extension[ExtensionType], + extensions: list[Extension[ExtensionType]], +) -> None: + # This is quadratic in the number of extensions + for e in extensions: + if e.oid == extension.oid: + raise ValueError("This extension has already been set.") + + +def _reject_duplicate_attribute( + oid: ObjectIdentifier, + attributes: list[tuple[ObjectIdentifier, bytes, int | None]], +) -> None: + # This is quadratic in the number of attributes + for attr_oid, _, _ in attributes: + if attr_oid == oid: + raise ValueError("This attribute has already been set.") + + +def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime: + """Normalizes a datetime to a naive datetime in UTC. + + time -- datetime to normalize. Assumed to be in UTC if not timezone + aware. 
+ """ + if time.tzinfo is not None: + offset = time.utcoffset() + offset = offset if offset else datetime.timedelta() + return time.replace(tzinfo=None) - offset + else: + return time + + +class Attribute: + def __init__( + self, + oid: ObjectIdentifier, + value: bytes, + _type: int = _ASN1Type.UTF8String.value, + ) -> None: + self._oid = oid + self._value = value + self._type = _type + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Attribute): + return NotImplemented + + return ( + self.oid == other.oid + and self.value == other.value + and self._type == other._type + ) + + def __hash__(self) -> int: + return hash((self.oid, self.value, self._type)) + + +class Attributes: + def __init__( + self, + attributes: Iterable[Attribute], + ) -> None: + self._attributes = list(attributes) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes") + + def __repr__(self) -> str: + return f"" + + def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute: + for attr in self: + if attr.oid == oid: + return attr + + raise AttributeNotFound(f"No {oid} attribute was found", oid) + + +class Version(utils.Enum): + v1 = 0 + v3 = 2 + + +class InvalidVersion(Exception): + def __init__(self, msg: str, parsed_version: int) -> None: + super().__init__(msg) + self.parsed_version = parsed_version + + +Certificate = rust_x509.Certificate + + +class RevokedCertificate(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def serial_number(self) -> int: + """ + Returns the serial number of the revoked certificate. + """ + + @property + @abc.abstractmethod + def revocation_date(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked. + """ + + @property + @abc.abstractmethod + def revocation_date_utc(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked as a non-naive + UTC datetime. + """ + + @property + @abc.abstractmethod + def extensions(self) -> Extensions: + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +RevokedCertificate.register(rust_x509.RevokedCertificate) + + +class _RawRevokedCertificate(RevokedCertificate): + def __init__( + self, + serial_number: int, + revocation_date: datetime.datetime, + extensions: Extensions, + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + @property + def serial_number(self) -> int: + return self._serial_number + + @property + def revocation_date(self) -> datetime.datetime: + warnings.warn( + "Properties that return a naïve datetime object have been " + "deprecated. 
Please switch to revocation_date_utc.", + utils.DeprecatedIn42, + stacklevel=2, + ) + return self._revocation_date + + @property + def revocation_date_utc(self) -> datetime.datetime: + return self._revocation_date.replace(tzinfo=datetime.timezone.utc) + + @property + def extensions(self) -> Extensions: + return self._extensions + + +CertificateRevocationList = rust_x509.CertificateRevocationList +CertificateSigningRequest = rust_x509.CertificateSigningRequest + + +load_pem_x509_certificate = rust_x509.load_pem_x509_certificate +load_der_x509_certificate = rust_x509.load_der_x509_certificate + +load_pem_x509_certificates = rust_x509.load_pem_x509_certificates + +load_pem_x509_csr = rust_x509.load_pem_x509_csr +load_der_x509_csr = rust_x509.load_der_x509_csr + +load_pem_x509_crl = rust_x509.load_pem_x509_crl +load_der_x509_crl = rust_x509.load_der_x509_crl + + +class CertificateSigningRequestBuilder: + def __init__( + self, + subject_name: Name | None = None, + extensions: list[Extension[ExtensionType]] = [], + attributes: list[tuple[ObjectIdentifier, bytes, int | None]] = [], + ): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + self._attributes = attributes + + def subject_name(self, name: Name) -> CertificateSigningRequestBuilder: + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateSigningRequestBuilder( + name, self._extensions, self._attributes + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateSigningRequestBuilder: + """ + Adds an X.509 extension to the certificate request. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateSigningRequestBuilder( + self._subject_name, + [*self._extensions, extension], + self._attributes, + ) + + def add_attribute( + self, + oid: ObjectIdentifier, + value: bytes, + *, + _tag: _ASN1Type | None = None, + ) -> CertificateSigningRequestBuilder: + """ + Adds an X.509 attribute with an OID and associated value. + """ + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + + if not isinstance(value, bytes): + raise TypeError("value must be bytes") + + if _tag is not None and not isinstance(_tag, _ASN1Type): + raise TypeError("tag must be _ASN1Type") + + _reject_duplicate_attribute(oid, self._attributes) + + if _tag is not None: + tag = _tag.value + else: + tag = None + + return CertificateSigningRequestBuilder( + self._subject_name, + self._extensions, + [*self._attributes, (oid, value, tag)], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> CertificateSigningRequest: + """ + Signs the request using the requestor's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_csr( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class CertificateBuilder: + _extensions: list[Extension[ExtensionType]] + + def __init__( + self, + issuer_name: Name | None = None, + subject_name: Name | None = None, + public_key: CertificatePublicKeyTypes | None = None, + serial_number: int | None = None, + not_valid_before: datetime.datetime | None = None, + not_valid_after: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + ) -> None: + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name: Name) -> CertificateBuilder: + """ + Sets the CA's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateBuilder( + name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def subject_name(self, name: Name) -> CertificateBuilder: + """ + Sets the requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateBuilder( + self._issuer_name, + name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def public_key( + self, + key: CertificatePublicKeyTypes, + ) -> CertificateBuilder: + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance( + key, + ( + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, + ), + ): + raise TypeError( + "Expecting one of DSAPublicKey, RSAPublicKey," + " EllipticCurvePublicKey, Ed25519PublicKey," + " Ed448PublicKey, X25519PublicKey, or " + "X448PublicKey." + ) + if self._public_key is not None: + raise ValueError("The public key may only be set once.") + return CertificateBuilder( + self._issuer_name, + self._subject_name, + key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def serial_number(self, number: int) -> CertificateBuilder: + """ + Sets the certificate serial number. 
+ """ + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive.") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 bits." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder: + """ + Sets the certificate activation time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_before is not None: + raise ValueError("The not valid before may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid before date must be on or after" + " 1950 January 1)." + ) + if self._not_valid_after is not None and time > self._not_valid_after: + raise ValueError( + "The not valid before date must be before the not valid after " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + time, + self._not_valid_after, + self._extensions, + ) + + def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder: + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_after is not None: + raise ValueError("The not valid after may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid after date must be on or after 1950 January 1." + ) + if ( + self._not_valid_before is not None + and time < self._not_valid_before + ): + raise ValueError( + "The not valid after date must be after the not valid before " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + time, + self._extensions, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateBuilder: + """ + Adds an X.509 extension to the certificate. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + [*self._extensions, extension], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> Certificate: + """ + Signs the certificate using the CA's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_certificate( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class CertificateRevocationListBuilder: + _extensions: list[Extension[ExtensionType]] + _revoked_certificates: list[RevokedCertificate] + + def __init__( + self, + issuer_name: Name | None = None, + last_update: datetime.datetime | None = None, + next_update: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + revoked_certificates: list[RevokedCertificate] = [], + ): + self._issuer_name = issuer_name + self._last_update = last_update + self._next_update = next_update + self._extensions = extensions + self._revoked_certificates = revoked_certificates + + def issuer_name( + self, issuer_name: Name + ) -> CertificateRevocationListBuilder: + if not isinstance(issuer_name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateRevocationListBuilder( + issuer_name, + self._last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def last_update( + self, last_update: datetime.datetime + ) -> CertificateRevocationListBuilder: + if not isinstance(last_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._last_update is not None: + raise ValueError("Last update may only be set once.") + last_update = _convert_to_naive_utc_time(last_update) + if last_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The last update date must be on or after 1950 January 1." + ) + if self._next_update is not None and last_update > self._next_update: + raise ValueError( + "The last update date must be before the next update date." + ) + return CertificateRevocationListBuilder( + self._issuer_name, + last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def next_update( + self, next_update: datetime.datetime + ) -> CertificateRevocationListBuilder: + if not isinstance(next_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._next_update is not None: + raise ValueError("Last update may only be set once.") + next_update = _convert_to_naive_utc_time(next_update) + if next_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The last update date must be on or after 1950 January 1." 
+ ) + if self._last_update is not None and next_update < self._last_update: + raise ValueError( + "The next update date must be after the last update date." + ) + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + next_update, + self._extensions, + self._revoked_certificates, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateRevocationListBuilder: + """ + Adds an X.509 extension to the certificate revocation list. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + [*self._extensions, extension], + self._revoked_certificates, + ) + + def add_revoked_certificate( + self, revoked_certificate: RevokedCertificate + ) -> CertificateRevocationListBuilder: + """ + Adds a revoked certificate to the CRL. + """ + if not isinstance(revoked_certificate, RevokedCertificate): + raise TypeError("Must be an instance of RevokedCertificate") + + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + self._extensions, + [*self._revoked_certificates, revoked_certificate], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> CertificateRevocationList: + if self._issuer_name is None: + raise ValueError("A CRL must have an issuer name") + + if self._last_update is None: + raise ValueError("A CRL must have a last update time") + + if self._next_update is None: + raise ValueError("A CRL must have a next update time") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_crl( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class RevokedCertificateBuilder: + def __init__( + self, + serial_number: int | None = None, + revocation_date: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + def serial_number(self, number: int) -> RevokedCertificateBuilder: + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 bits." 
+ ) + return RevokedCertificateBuilder( + number, self._revocation_date, self._extensions + ) + + def revocation_date( + self, time: datetime.datetime + ) -> RevokedCertificateBuilder: + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._revocation_date is not None: + raise ValueError("The revocation date may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The revocation date must be on or after 1950 January 1." + ) + return RevokedCertificateBuilder( + self._serial_number, time, self._extensions + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> RevokedCertificateBuilder: + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return RevokedCertificateBuilder( + self._serial_number, + self._revocation_date, + [*self._extensions, extension], + ) + + def build(self, backend: typing.Any = None) -> RevokedCertificate: + if self._serial_number is None: + raise ValueError("A revoked certificate must have a serial number") + if self._revocation_date is None: + raise ValueError( + "A revoked certificate must have a revocation date" + ) + return _RawRevokedCertificate( + self._serial_number, + self._revocation_date, + Extensions(self._extensions), + ) + + +def random_serial_number() -> int: + return int.from_bytes(os.urandom(20), "big") >> 1 diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/certificate_transparency.py b/.venv/lib/python3.9/site-packages/cryptography/x509/certificate_transparency.py new file mode 100644 index 0000000..fb66cc6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/x509/certificate_transparency.py @@ -0,0 +1,35 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 + + +class LogEntryType(utils.Enum): + X509_CERTIFICATE = 0 + PRE_CERTIFICATE = 1 + + +class Version(utils.Enum): + v1 = 0 + + +class SignatureAlgorithm(utils.Enum): + """ + Signature algorithms that are valid for SCTs. + + These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2). + + See: + """ + + ANONYMOUS = 0 + RSA = 1 + DSA = 2 + ECDSA = 3 + + +SignedCertificateTimestamp = rust_x509.Sct diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/extensions.py b/.venv/lib/python3.9/site-packages/cryptography/x509/extensions.py new file mode 100644 index 0000000..dfa472d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/x509/extensions.py @@ -0,0 +1,2528 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
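+
+# A minimal usage sketch of RevokedCertificateBuilder,
+# CertificateRevocationListBuilder and random_serial_number() from
+# x509/base.py above. The issuer key and CA name are illustrative
+# placeholders; the snippet is kept in comments so importing this module
+# never executes it.
+#
+#   from datetime import datetime, timedelta, timezone
+#   from cryptography import x509
+#   from cryptography.hazmat.primitives import hashes
+#   from cryptography.hazmat.primitives.asymmetric import ec
+#   from cryptography.x509.oid import NameOID
+#
+#   issuer_key = ec.generate_private_key(ec.SECP256R1())
+#   revoked = (
+#       x509.RevokedCertificateBuilder()
+#       .serial_number(x509.random_serial_number())
+#       .revocation_date(datetime.now(timezone.utc))
+#       .build()
+#   )
+#   crl = (
+#       x509.CertificateRevocationListBuilder()
+#       .issuer_name(
+#           x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "Example CA")])
+#       )
+#       .last_update(datetime.now(timezone.utc))
+#       .next_update(datetime.now(timezone.utc) + timedelta(days=7))
+#       .add_revoked_certificate(revoked)
+#       .sign(issuer_key, hashes.SHA256())
+#   )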
+ +from __future__ import annotations + +import abc +import datetime +import hashlib +import ipaddress +import typing +from collections.abc import Iterable, Iterator + +from cryptography import utils +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPublicKeyTypes, + CertificatePublicKeyTypes, +) +from cryptography.x509.certificate_transparency import ( + SignedCertificateTimestamp, +) +from cryptography.x509.general_name import ( + DirectoryName, + DNSName, + GeneralName, + IPAddress, + OtherName, + RegisteredID, + RFC822Name, + UniformResourceIdentifier, + _IPAddressTypes, +) +from cryptography.x509.name import Name, RelativeDistinguishedName +from cryptography.x509.oid import ( + CRLEntryExtensionOID, + ExtensionOID, + ObjectIdentifier, + OCSPExtensionOID, +) + +ExtensionTypeVar = typing.TypeVar( + "ExtensionTypeVar", bound="ExtensionType", covariant=True +) + + +def _key_identifier_from_public_key( + public_key: CertificatePublicKeyTypes, +) -> bytes: + if isinstance(public_key, RSAPublicKey): + data = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.PKCS1, + ) + elif isinstance(public_key, EllipticCurvePublicKey): + data = public_key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + else: + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo, + ) + data = asn1.parse_spki_for_data(serialized) + + return hashlib.sha1(data).digest() + + +def _make_sequence_methods(field_name: str): + def len_method(self) -> int: + return len(getattr(self, field_name)) + + def iter_method(self): + return iter(getattr(self, field_name)) + + def getitem_method(self, idx): + return getattr(self, field_name)[idx] + + return len_method, iter_method, getitem_method + + +class DuplicateExtension(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +class ExtensionType(metaclass=abc.ABCMeta): + oid: typing.ClassVar[ObjectIdentifier] + + def public_bytes(self) -> bytes: + """ + Serializes the extension type to DER. + """ + raise NotImplementedError( + f"public_bytes is not implemented for extension type {self!r}" + ) + + +class Extensions: + def __init__(self, extensions: Iterable[Extension[ExtensionType]]) -> None: + self._extensions = list(extensions) + + def get_extension_for_oid( + self, oid: ObjectIdentifier + ) -> Extension[ExtensionType]: + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound(f"No {oid} extension was found", oid) + + def get_extension_for_class( + self, extclass: type[ExtensionTypeVar] + ) -> Extension[ExtensionTypeVar]: + if extclass is UnrecognizedExtension: + raise TypeError( + "UnrecognizedExtension can't be used with " + "get_extension_for_class because more than one instance of the" + " class may be present." 
+ ) + + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + f"No {extclass} extension was found", extclass.oid + ) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") + + def __repr__(self) -> str: + return f"" + + +class CRLNumber(ExtensionType): + oid = ExtensionOID.CRL_NUMBER + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLNumber): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return f"" + + @property + def crl_number(self) -> int: + return self._crl_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityKeyIdentifier(ExtensionType): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__( + self, + key_identifier: bytes | None, + authority_cert_issuer: Iterable[GeneralName] | None, + authority_cert_serial_number: int | None, + ) -> None: + if (authority_cert_issuer is None) != ( + authority_cert_serial_number is None + ): + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if authority_cert_issuer is not None: + authority_cert_issuer = list(authority_cert_issuer) + if not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if authority_cert_serial_number is not None and not isinstance( + authority_cert_serial_number, int + ): + raise TypeError("authority_cert_serial_number must be an integer") + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + # This takes a subset of CertificatePublicKeyTypes because an issuer + # cannot have an X25519/X448 key. This introduces some unfortunate + # asymmetry that requires typing users to explicitly + # narrow their type, but we should make this accurate and not just + # convenient. 
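+    #
+    # Illustrative note (names such as `issuer_key` are placeholders, not
+    # part of this module): for a given issuer public key the two
+    # constructors below produce an AuthorityKeyIdentifier with the same
+    # key_identifier bytes,
+    #
+    #   ski = SubjectKeyIdentifier.from_public_key(issuer_key)
+    #   AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ski)
+    #   AuthorityKeyIdentifier.from_issuer_public_key(issuer_key)
+    #
+    # because both are derived from the key identifier computed by
+    # _key_identifier_from_public_key; the SKI-based form simply reuses the
+    # digest already present in the issuer's SubjectKeyIdentifier extension.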
+ @classmethod + def from_issuer_public_key( + cls, public_key: CertificateIssuerPublicKeyTypes + ) -> AuthorityKeyIdentifier: + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + @classmethod + def from_issuer_subject_key_identifier( + cls, ski: SubjectKeyIdentifier + ) -> AuthorityKeyIdentifier: + return cls( + key_identifier=ski.digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier + and self.authority_cert_issuer == other.authority_cert_issuer + and self.authority_cert_serial_number + == other.authority_cert_serial_number + ) + + def __hash__(self) -> int: + if self.authority_cert_issuer is None: + aci = None + else: + aci = tuple(self.authority_cert_issuer) + return hash( + (self.key_identifier, aci, self.authority_cert_serial_number) + ) + + @property + def key_identifier(self) -> bytes | None: + return self._key_identifier + + @property + def authority_cert_issuer( + self, + ) -> list[GeneralName] | None: + return self._authority_cert_issuer + + @property + def authority_cert_serial_number(self) -> int | None: + return self._authority_cert_serial_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectKeyIdentifier(ExtensionType): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest: bytes) -> None: + self._digest = digest + + @classmethod + def from_public_key( + cls, public_key: CertificatePublicKeyTypes + ) -> SubjectKeyIdentifier: + return cls(_key_identifier_from_public_key(public_key)) + + @property + def digest(self) -> bytes: + return self._digest + + @property + def key_identifier(self) -> bytes: + return self._digest + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __hash__(self) -> int: + return hash(self.digest) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityInformationAccess(ExtensionType): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__(self, descriptions: Iterable[AccessDescription]) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectInformationAccess(ExtensionType): + oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS + + def __init__(self, descriptions: Iterable[AccessDescription]) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + 
"Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AccessDescription: + def __init__( + self, access_method: ObjectIdentifier, access_location: GeneralName + ) -> None: + if not isinstance(access_method, ObjectIdentifier): + raise TypeError("access_method must be an ObjectIdentifier") + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method + and self.access_location == other.access_location + ) + + def __hash__(self) -> int: + return hash((self.access_method, self.access_location)) + + @property + def access_method(self) -> ObjectIdentifier: + return self._access_method + + @property + def access_location(self) -> GeneralName: + return self._access_location + + +class BasicConstraints(ExtensionType): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca: bool, path_length: int | None) -> None: + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if path_length is not None and ( + not isinstance(path_length, int) or path_length < 0 + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + @property + def ca(self) -> bool: + return self._ca + + @property + def path_length(self) -> int | None: + return self._path_length + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __hash__(self) -> int: + return hash((self.ca, self.path_length)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DeltaCRLIndicator(ExtensionType): + oid = ExtensionOID.DELTA_CRL_INDICATOR + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + @property + def crl_number(self) -> int: + return self._crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DeltaCRLIndicator): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return f"" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CRLDistributionPoints(ExtensionType): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__( + self, distribution_points: Iterable[DistributionPoint] + ) -> None: + distribution_points = 
list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class FreshestCRL(ExtensionType): + oid = ExtensionOID.FRESHEST_CRL + + def __init__( + self, distribution_points: Iterable[DistributionPoint] + ) -> None: + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FreshestCRL): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DistributionPoint: + def __init__( + self, + full_name: Iterable[GeneralName] | None, + relative_name: RelativeDistinguishedName | None, + reasons: frozenset[ReasonFlags] | None, + crl_issuer: Iterable[GeneralName] | None, + ) -> None: + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." + ) + if not full_name and not relative_name and not crl_issuer: + raise ValueError( + "Either full_name, relative_name or crl_issuer must be " + "provided." 
+ ) + + if full_name is not None: + full_name = list(full_name) + if not all(isinstance(x, GeneralName) for x in full_name): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name: + if not isinstance(relative_name, RelativeDistinguishedName): + raise TypeError( + "relative_name must be a RelativeDistinguishedName" + ) + + if crl_issuer is not None: + crl_issuer = list(crl_issuer) + if not all(isinstance(x, GeneralName) for x in crl_issuer): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and ( + not isinstance(reasons, frozenset) + or not all(isinstance(x, ReasonFlags) for x in reasons) + ): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons + or ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.reasons == other.reasons + and self.crl_issuer == other.crl_issuer + ) + + def __hash__(self) -> int: + if self.full_name is not None: + fn: tuple[GeneralName, ...] | None = tuple(self.full_name) + else: + fn = None + + if self.crl_issuer is not None: + crl_issuer: tuple[GeneralName, ...] | None = tuple(self.crl_issuer) + else: + crl_issuer = None + + return hash((fn, self.relative_name, self.reasons, crl_issuer)) + + @property + def full_name(self) -> list[GeneralName] | None: + return self._full_name + + @property + def relative_name(self) -> RelativeDistinguishedName | None: + return self._relative_name + + @property + def reasons(self) -> frozenset[ReasonFlags] | None: + return self._reasons + + @property + def crl_issuer(self) -> list[GeneralName] | None: + return self._crl_issuer + + +class ReasonFlags(utils.Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +# These are distribution point bit string mappings. Not to be confused with +# CRLReason reason flags bit string mappings. 
+# ReasonFlags ::= BIT STRING { +# unused (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# privilegeWithdrawn (7), +# aACompromise (8) } +_REASON_BIT_MAPPING = { + 1: ReasonFlags.key_compromise, + 2: ReasonFlags.ca_compromise, + 3: ReasonFlags.affiliation_changed, + 4: ReasonFlags.superseded, + 5: ReasonFlags.cessation_of_operation, + 6: ReasonFlags.certificate_hold, + 7: ReasonFlags.privilege_withdrawn, + 8: ReasonFlags.aa_compromise, +} + +_CRLREASONFLAGS = { + ReasonFlags.key_compromise: 1, + ReasonFlags.ca_compromise: 2, + ReasonFlags.affiliation_changed: 3, + ReasonFlags.superseded: 4, + ReasonFlags.cessation_of_operation: 5, + ReasonFlags.certificate_hold: 6, + ReasonFlags.privilege_withdrawn: 7, + ReasonFlags.aa_compromise: 8, +} + +# CRLReason ::= ENUMERATED { +# unspecified (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# -- value 7 is not used +# removeFromCRL (8), +# privilegeWithdrawn (9), +# aACompromise (10) } +_CRL_ENTRY_REASON_ENUM_TO_CODE = { + ReasonFlags.unspecified: 0, + ReasonFlags.key_compromise: 1, + ReasonFlags.ca_compromise: 2, + ReasonFlags.affiliation_changed: 3, + ReasonFlags.superseded: 4, + ReasonFlags.cessation_of_operation: 5, + ReasonFlags.certificate_hold: 6, + ReasonFlags.remove_from_crl: 8, + ReasonFlags.privilege_withdrawn: 9, + ReasonFlags.aa_compromise: 10, +} + + +class PolicyConstraints(ExtensionType): + oid = ExtensionOID.POLICY_CONSTRAINTS + + def __init__( + self, + require_explicit_policy: int | None, + inhibit_policy_mapping: int | None, + ) -> None: + if require_explicit_policy is not None and not isinstance( + require_explicit_policy, int + ): + raise TypeError( + "require_explicit_policy must be a non-negative integer or " + "None" + ) + + if inhibit_policy_mapping is not None and not isinstance( + inhibit_policy_mapping, int + ): + raise TypeError( + "inhibit_policy_mapping must be a non-negative integer or None" + ) + + if inhibit_policy_mapping is None and require_explicit_policy is None: + raise ValueError( + "At least one of require_explicit_policy and " + "inhibit_policy_mapping must not be None" + ) + + self._require_explicit_policy = require_explicit_policy + self._inhibit_policy_mapping = inhibit_policy_mapping + + def __repr__(self) -> str: + return ( + "".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PolicyConstraints): + return NotImplemented + + return ( + self.require_explicit_policy == other.require_explicit_policy + and self.inhibit_policy_mapping == other.inhibit_policy_mapping + ) + + def __hash__(self) -> int: + return hash( + (self.require_explicit_policy, self.inhibit_policy_mapping) + ) + + @property + def require_explicit_policy(self) -> int | None: + return self._require_explicit_policy + + @property + def inhibit_policy_mapping(self) -> int | None: + return self._inhibit_policy_mapping + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CertificatePolicies(ExtensionType): + oid = ExtensionOID.CERTIFICATE_POLICIES + + def __init__(self, policies: Iterable[PolicyInformation]) -> None: + policies = list(policies) + if not all(isinstance(x, PolicyInformation) for x in policies): + raise TypeError( + "Every item in the policies list must be a PolicyInformation" + ) + + self._policies = policies + + __len__, __iter__, __getitem__ = 
_make_sequence_methods("_policies") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CertificatePolicies): + return NotImplemented + + return self._policies == other._policies + + def __hash__(self) -> int: + return hash(tuple(self._policies)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PolicyInformation: + def __init__( + self, + policy_identifier: ObjectIdentifier, + policy_qualifiers: Iterable[str | UserNotice] | None, + ) -> None: + if not isinstance(policy_identifier, ObjectIdentifier): + raise TypeError("policy_identifier must be an ObjectIdentifier") + + self._policy_identifier = policy_identifier + + if policy_qualifiers is not None: + policy_qualifiers = list(policy_qualifiers) + if not all( + isinstance(x, (str, UserNotice)) for x in policy_qualifiers + ): + raise TypeError( + "policy_qualifiers must be a list of strings and/or " + "UserNotice objects or None" + ) + + self._policy_qualifiers = policy_qualifiers + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PolicyInformation): + return NotImplemented + + return ( + self.policy_identifier == other.policy_identifier + and self.policy_qualifiers == other.policy_qualifiers + ) + + def __hash__(self) -> int: + if self.policy_qualifiers is not None: + pq = tuple(self.policy_qualifiers) + else: + pq = None + + return hash((self.policy_identifier, pq)) + + @property + def policy_identifier(self) -> ObjectIdentifier: + return self._policy_identifier + + @property + def policy_qualifiers( + self, + ) -> list[str | UserNotice] | None: + return self._policy_qualifiers + + +class UserNotice: + def __init__( + self, + notice_reference: NoticeReference | None, + explicit_text: str | None, + ) -> None: + if notice_reference and not isinstance( + notice_reference, NoticeReference + ): + raise TypeError( + "notice_reference must be None or a NoticeReference" + ) + + self._notice_reference = notice_reference + self._explicit_text = explicit_text + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UserNotice): + return NotImplemented + + return ( + self.notice_reference == other.notice_reference + and self.explicit_text == other.explicit_text + ) + + def __hash__(self) -> int: + return hash((self.notice_reference, self.explicit_text)) + + @property + def notice_reference(self) -> NoticeReference | None: + return self._notice_reference + + @property + def explicit_text(self) -> str | None: + return self._explicit_text + + +class NoticeReference: + def __init__( + self, + organization: str | None, + notice_numbers: Iterable[int], + ) -> None: + self._organization = organization + notice_numbers = list(notice_numbers) + if not all(isinstance(x, int) for x in notice_numbers): + raise TypeError("notice_numbers must be a list of integers") + + self._notice_numbers = notice_numbers + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NoticeReference): + return NotImplemented + + return ( + self.organization == other.organization + and self.notice_numbers == other.notice_numbers + ) + + def __hash__(self) -> int: + return hash((self.organization, tuple(self.notice_numbers))) + + @property + def organization(self) -> str | None: + return self._organization + + @property + def notice_numbers(self) -> list[int]: + return 
self._notice_numbers + + +class ExtendedKeyUsage(ExtensionType): + oid = ExtensionOID.EXTENDED_KEY_USAGE + + def __init__(self, usages: Iterable[ObjectIdentifier]) -> None: + usages = list(usages) + if not all(isinstance(x, ObjectIdentifier) for x in usages): + raise TypeError( + "Every item in the usages list must be an ObjectIdentifier" + ) + + self._usages = usages + + __len__, __iter__, __getitem__ = _make_sequence_methods("_usages") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ExtendedKeyUsage): + return NotImplemented + + return self._usages == other._usages + + def __hash__(self) -> int: + return hash(tuple(self._usages)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class OCSPNoCheck(ExtensionType): + oid = ExtensionOID.OCSP_NO_CHECK + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OCSPNoCheck): + return NotImplemented + + return True + + def __hash__(self) -> int: + return hash(OCSPNoCheck) + + def __repr__(self) -> str: + return "" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PrecertPoison(ExtensionType): + oid = ExtensionOID.PRECERT_POISON + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PrecertPoison): + return NotImplemented + + return True + + def __hash__(self) -> int: + return hash(PrecertPoison) + + def __repr__(self) -> str: + return "" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class TLSFeature(ExtensionType): + oid = ExtensionOID.TLS_FEATURE + + def __init__(self, features: Iterable[TLSFeatureType]) -> None: + features = list(features) + if ( + not all(isinstance(x, TLSFeatureType) for x in features) + or len(features) == 0 + ): + raise TypeError( + "features must be a list of elements from the TLSFeatureType " + "enum" + ) + + self._features = features + + __len__, __iter__, __getitem__ = _make_sequence_methods("_features") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TLSFeature): + return NotImplemented + + return self._features == other._features + + def __hash__(self) -> int: + return hash(tuple(self._features)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class TLSFeatureType(utils.Enum): + # status_request is defined in RFC 6066 and is used for what is commonly + # called OCSP Must-Staple when present in the TLS Feature extension in an + # X.509 certificate. + status_request = 5 + # status_request_v2 is defined in RFC 6961 and allows multiple OCSP + # responses to be provided. It is not currently in use by clients or + # servers. 
+ status_request_v2 = 17 + + +_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType} + + +class InhibitAnyPolicy(ExtensionType): + oid = ExtensionOID.INHIBIT_ANY_POLICY + + def __init__(self, skip_certs: int) -> None: + if not isinstance(skip_certs, int): + raise TypeError("skip_certs must be an integer") + + if skip_certs < 0: + raise ValueError("skip_certs must be a non-negative integer") + + self._skip_certs = skip_certs + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, InhibitAnyPolicy): + return NotImplemented + + return self.skip_certs == other.skip_certs + + def __hash__(self) -> int: + return hash(self.skip_certs) + + @property + def skip_certs(self) -> int: + return self._skip_certs + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class KeyUsage(ExtensionType): + oid = ExtensionOID.KEY_USAGE + + def __init__( + self, + digital_signature: bool, + content_commitment: bool, + key_encipherment: bool, + data_encipherment: bool, + key_agreement: bool, + key_cert_sign: bool, + crl_sign: bool, + encipher_only: bool, + decipher_only: bool, + ) -> None: + if not key_agreement and (encipher_only or decipher_only): + raise ValueError( + "encipher_only and decipher_only can only be true when " + "key_agreement is true" + ) + + self._digital_signature = digital_signature + self._content_commitment = content_commitment + self._key_encipherment = key_encipherment + self._data_encipherment = data_encipherment + self._key_agreement = key_agreement + self._key_cert_sign = key_cert_sign + self._crl_sign = crl_sign + self._encipher_only = encipher_only + self._decipher_only = decipher_only + + @property + def digital_signature(self) -> bool: + return self._digital_signature + + @property + def content_commitment(self) -> bool: + return self._content_commitment + + @property + def key_encipherment(self) -> bool: + return self._key_encipherment + + @property + def data_encipherment(self) -> bool: + return self._data_encipherment + + @property + def key_agreement(self) -> bool: + return self._key_agreement + + @property + def key_cert_sign(self) -> bool: + return self._key_cert_sign + + @property + def crl_sign(self) -> bool: + return self._crl_sign + + @property + def encipher_only(self) -> bool: + if not self.key_agreement: + raise ValueError( + "encipher_only is undefined unless key_agreement is true" + ) + else: + return self._encipher_only + + @property + def decipher_only(self) -> bool: + if not self.key_agreement: + raise ValueError( + "decipher_only is undefined unless key_agreement is true" + ) + else: + return self._decipher_only + + def __repr__(self) -> str: + try: + encipher_only = self.encipher_only + decipher_only = self.decipher_only + except ValueError: + # Users found None confusing because even though encipher/decipher + # have no meaning unless key_agreement is true, to construct an + # instance of the class you still need to pass False. 
+ encipher_only = False + decipher_only = False + + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, KeyUsage): + return NotImplemented + + return ( + self.digital_signature == other.digital_signature + and self.content_commitment == other.content_commitment + and self.key_encipherment == other.key_encipherment + and self.data_encipherment == other.data_encipherment + and self.key_agreement == other.key_agreement + and self.key_cert_sign == other.key_cert_sign + and self.crl_sign == other.crl_sign + and self._encipher_only == other._encipher_only + and self._decipher_only == other._decipher_only + ) + + def __hash__(self) -> int: + return hash( + ( + self.digital_signature, + self.content_commitment, + self.key_encipherment, + self.data_encipherment, + self.key_agreement, + self.key_cert_sign, + self.crl_sign, + self._encipher_only, + self._decipher_only, + ) + ) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PrivateKeyUsagePeriod(ExtensionType): + oid = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD + + def __init__( + self, + not_before: datetime.datetime | None, + not_after: datetime.datetime | None, + ) -> None: + if ( + not isinstance(not_before, datetime.datetime) + and not_before is not None + ): + raise TypeError("not_before must be a datetime.datetime or None") + + if ( + not isinstance(not_after, datetime.datetime) + and not_after is not None + ): + raise TypeError("not_after must be a datetime.datetime or None") + + if not_before is None and not_after is None: + raise ValueError( + "At least one of not_before and not_after must not be None" + ) + + if ( + not_before is not None + and not_after is not None + and not_before > not_after + ): + raise ValueError("not_before must be before not_after") + + self._not_before = not_before + self._not_after = not_after + + @property + def not_before(self) -> datetime.datetime | None: + return self._not_before + + @property + def not_after(self) -> datetime.datetime | None: + return self._not_after + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PrivateKeyUsagePeriod): + return NotImplemented + + return ( + self.not_before == other.not_before + and self.not_after == other.not_after + ) + + def __hash__(self) -> int: + return hash((self.not_before, self.not_after)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class NameConstraints(ExtensionType): + oid = ExtensionOID.NAME_CONSTRAINTS + + def __init__( + self, + permitted_subtrees: Iterable[GeneralName] | None, + excluded_subtrees: Iterable[GeneralName] | None, + ) -> None: + if permitted_subtrees is not None: + permitted_subtrees = list(permitted_subtrees) + if not permitted_subtrees: + raise ValueError( + "permitted_subtrees must be a non-empty list or None" + ) + if not all(isinstance(x, GeneralName) for x in permitted_subtrees): + raise TypeError( + "permitted_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_tree(permitted_subtrees) + + if excluded_subtrees is not None: + excluded_subtrees = list(excluded_subtrees) + if not excluded_subtrees: + raise ValueError( + "excluded_subtrees must be a non-empty list or None" + ) + if not all(isinstance(x, GeneralName) for x in excluded_subtrees): + raise TypeError( + "excluded_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_tree(excluded_subtrees) + + if permitted_subtrees is 
None and excluded_subtrees is None: + raise ValueError( + "At least one of permitted_subtrees and excluded_subtrees " + "must not be None" + ) + + self._permitted_subtrees = permitted_subtrees + self._excluded_subtrees = excluded_subtrees + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NameConstraints): + return NotImplemented + + return ( + self.excluded_subtrees == other.excluded_subtrees + and self.permitted_subtrees == other.permitted_subtrees + ) + + def _validate_tree(self, tree: Iterable[GeneralName]) -> None: + self._validate_ip_name(tree) + self._validate_dns_name(tree) + + def _validate_ip_name(self, tree: Iterable[GeneralName]) -> None: + if any( + isinstance(name, IPAddress) + and not isinstance( + name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) + ) + for name in tree + ): + raise TypeError( + "IPAddress name constraints must be an IPv4Network or" + " IPv6Network object" + ) + + def _validate_dns_name(self, tree: Iterable[GeneralName]) -> None: + if any( + isinstance(name, DNSName) and "*" in name.value for name in tree + ): + raise ValueError( + "DNSName name constraints must not contain the '*' wildcard" + " character" + ) + + def __repr__(self) -> str: + return ( + f"" + ) + + def __hash__(self) -> int: + if self.permitted_subtrees is not None: + ps: tuple[GeneralName, ...] | None = tuple(self.permitted_subtrees) + else: + ps = None + + if self.excluded_subtrees is not None: + es: tuple[GeneralName, ...] | None = tuple(self.excluded_subtrees) + else: + es = None + + return hash((ps, es)) + + @property + def permitted_subtrees( + self, + ) -> list[GeneralName] | None: + return self._permitted_subtrees + + @property + def excluded_subtrees( + self, + ) -> list[GeneralName] | None: + return self._excluded_subtrees + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class Extension(typing.Generic[ExtensionTypeVar]): + def __init__( + self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar + ) -> None: + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + + if not isinstance(critical, bool): + raise TypeError("critical must be a boolean value") + + self._oid = oid + self._critical = critical + self._value = value + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def critical(self) -> bool: + return self._critical + + @property + def value(self) -> ExtensionTypeVar: + return self._value + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Extension): + return NotImplemented + + return ( + self.oid == other.oid + and self.critical == other.critical + and self.value == other.value + ) + + def __hash__(self) -> int: + return hash((self.oid, self.critical, self.value)) + + +class GeneralNames: + def __init__(self, general_names: Iterable[GeneralName]) -> None: + general_names = list(general_names) + if not all(isinstance(x, GeneralName) for x in general_names): + raise TypeError( + "Every item in the general_names list must be an " + "object conforming to the GeneralName interface" + ) + + self._general_names = general_names + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... 
+ + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + # Return the value of each GeneralName, except for OtherName instances + # which we return directly because it has two important properties not + # just one value. + objs = (i for i in self if isinstance(i, type)) + if type != OtherName: + return [i.value for i in objs] + return list(objs) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, GeneralNames): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(tuple(self._general_names)) + + +class SubjectAlternativeName(ExtensionType): + oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME + + def __init__(self, general_names: Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + return self._general_names.get_values_for_type(type) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(self._general_names) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class IssuerAlternativeName(ExtensionType): + oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME + + def __init__(self, general_names: Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... 
+ + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + return self._general_names.get_values_for_type(type) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IssuerAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(self._general_names) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CertificateIssuer(ExtensionType): + oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER + + def __init__(self, general_names: Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... 
+ + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + return self._general_names.get_values_for_type(type) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CertificateIssuer): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(self._general_names) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CRLReason(ExtensionType): + oid = CRLEntryExtensionOID.CRL_REASON + + def __init__(self, reason: ReasonFlags) -> None: + if not isinstance(reason, ReasonFlags): + raise TypeError("reason must be an element from ReasonFlags") + + self._reason = reason + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLReason): + return NotImplemented + + return self.reason == other.reason + + def __hash__(self) -> int: + return hash(self.reason) + + @property + def reason(self) -> ReasonFlags: + return self._reason + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class InvalidityDate(ExtensionType): + oid = CRLEntryExtensionOID.INVALIDITY_DATE + + def __init__(self, invalidity_date: datetime.datetime) -> None: + if not isinstance(invalidity_date, datetime.datetime): + raise TypeError("invalidity_date must be a datetime.datetime") + + self._invalidity_date = invalidity_date + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, InvalidityDate): + return NotImplemented + + return self.invalidity_date == other.invalidity_date + + def __hash__(self) -> int: + return hash(self.invalidity_date) + + @property + def invalidity_date(self) -> datetime.datetime: + return self._invalidity_date + + @property + def invalidity_date_utc(self) -> datetime.datetime: + if self._invalidity_date.tzinfo is None: + return self._invalidity_date.replace(tzinfo=datetime.timezone.utc) + else: + return self._invalidity_date.astimezone(tz=datetime.timezone.utc) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PrecertificateSignedCertificateTimestamps(ExtensionType): + oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS + + def __init__( + self, + signed_certificate_timestamps: Iterable[SignedCertificateTimestamp], + ) -> None: + signed_certificate_timestamps = list(signed_certificate_timestamps) + if not all( + isinstance(sct, SignedCertificateTimestamp) + for sct in signed_certificate_timestamps + ): + raise TypeError( + "Every item in the signed_certificate_timestamps list must be " + "a SignedCertificateTimestamp" + ) + self._signed_certificate_timestamps = signed_certificate_timestamps + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_signed_certificate_timestamps" + ) + + def __repr__(self) -> str: + return f"" + + def __hash__(self) -> int: + return hash(tuple(self._signed_certificate_timestamps)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PrecertificateSignedCertificateTimestamps): + return NotImplemented + + return ( + self._signed_certificate_timestamps + == other._signed_certificate_timestamps + ) + + def public_bytes(self) -> 
bytes: + return rust_x509.encode_extension_value(self) + + +class SignedCertificateTimestamps(ExtensionType): + oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS + + def __init__( + self, + signed_certificate_timestamps: Iterable[SignedCertificateTimestamp], + ) -> None: + signed_certificate_timestamps = list(signed_certificate_timestamps) + if not all( + isinstance(sct, SignedCertificateTimestamp) + for sct in signed_certificate_timestamps + ): + raise TypeError( + "Every item in the signed_certificate_timestamps list must be " + "a SignedCertificateTimestamp" + ) + self._signed_certificate_timestamps = signed_certificate_timestamps + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_signed_certificate_timestamps" + ) + + def __repr__(self) -> str: + return f"" + + def __hash__(self) -> int: + return hash(tuple(self._signed_certificate_timestamps)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SignedCertificateTimestamps): + return NotImplemented + + return ( + self._signed_certificate_timestamps + == other._signed_certificate_timestamps + ) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class OCSPNonce(ExtensionType): + oid = OCSPExtensionOID.NONCE + + def __init__(self, nonce: bytes) -> None: + if not isinstance(nonce, bytes): + raise TypeError("nonce must be bytes") + + self._nonce = nonce + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OCSPNonce): + return NotImplemented + + return self.nonce == other.nonce + + def __hash__(self) -> int: + return hash(self.nonce) + + def __repr__(self) -> str: + return f"" + + @property + def nonce(self) -> bytes: + return self._nonce + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class OCSPAcceptableResponses(ExtensionType): + oid = OCSPExtensionOID.ACCEPTABLE_RESPONSES + + def __init__(self, responses: Iterable[ObjectIdentifier]) -> None: + responses = list(responses) + if any(not isinstance(r, ObjectIdentifier) for r in responses): + raise TypeError("All responses must be ObjectIdentifiers") + + self._responses = responses + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OCSPAcceptableResponses): + return NotImplemented + + return self._responses == other._responses + + def __hash__(self) -> int: + return hash(tuple(self._responses)) + + def __repr__(self) -> str: + return f"" + + def __iter__(self) -> Iterator[ObjectIdentifier]: + return iter(self._responses) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class IssuingDistributionPoint(ExtensionType): + oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT + + def __init__( + self, + full_name: Iterable[GeneralName] | None, + relative_name: RelativeDistinguishedName | None, + only_contains_user_certs: bool, + only_contains_ca_certs: bool, + only_some_reasons: frozenset[ReasonFlags] | None, + indirect_crl: bool, + only_contains_attribute_certs: bool, + ) -> None: + if full_name is not None: + full_name = list(full_name) + + if only_some_reasons and ( + not isinstance(only_some_reasons, frozenset) + or not all(isinstance(x, ReasonFlags) for x in only_some_reasons) + ): + raise TypeError( + "only_some_reasons must be None or frozenset of ReasonFlags" + ) + + if only_some_reasons and ( + ReasonFlags.unspecified in only_some_reasons + or ReasonFlags.remove_from_crl in only_some_reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in an " + 
"IssuingDistributionPoint" + ) + + if not ( + isinstance(only_contains_user_certs, bool) + and isinstance(only_contains_ca_certs, bool) + and isinstance(indirect_crl, bool) + and isinstance(only_contains_attribute_certs, bool) + ): + raise TypeError( + "only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl and only_contains_attribute_certs " + "must all be boolean." + ) + + # Per RFC5280 Section 5.2.5, the Issuing Distribution Point extension + # in a CRL can have only one of onlyContainsUserCerts, + # onlyContainsCACerts, onlyContainsAttributeCerts set to TRUE. + crl_constraints = [ + only_contains_user_certs, + only_contains_ca_certs, + only_contains_attribute_certs, + ] + + if len([x for x in crl_constraints if x]) > 1: + raise ValueError( + "Only one of the following can be set to True: " + "only_contains_user_certs, only_contains_ca_certs, " + "only_contains_attribute_certs" + ) + + if not any( + [ + only_contains_user_certs, + only_contains_ca_certs, + indirect_crl, + only_contains_attribute_certs, + full_name, + relative_name, + only_some_reasons, + ] + ): + raise ValueError( + "Cannot create empty extension: " + "if only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, and only_contains_attribute_certs are all False" + ", then either full_name, relative_name, or only_some_reasons " + "must have a value." + ) + + self._only_contains_user_certs = only_contains_user_certs + self._only_contains_ca_certs = only_contains_ca_certs + self._indirect_crl = indirect_crl + self._only_contains_attribute_certs = only_contains_attribute_certs + self._only_some_reasons = only_some_reasons + self._full_name = full_name + self._relative_name = relative_name + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IssuingDistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.only_contains_user_certs == other.only_contains_user_certs + and self.only_contains_ca_certs == other.only_contains_ca_certs + and self.only_some_reasons == other.only_some_reasons + and self.indirect_crl == other.indirect_crl + and self.only_contains_attribute_certs + == other.only_contains_attribute_certs + ) + + def __hash__(self) -> int: + return hash( + ( + self.full_name, + self.relative_name, + self.only_contains_user_certs, + self.only_contains_ca_certs, + self.only_some_reasons, + self.indirect_crl, + self.only_contains_attribute_certs, + ) + ) + + @property + def full_name(self) -> list[GeneralName] | None: + return self._full_name + + @property + def relative_name(self) -> RelativeDistinguishedName | None: + return self._relative_name + + @property + def only_contains_user_certs(self) -> bool: + return self._only_contains_user_certs + + @property + def only_contains_ca_certs(self) -> bool: + return self._only_contains_ca_certs + + @property + def only_some_reasons( + self, + ) -> frozenset[ReasonFlags] | None: + return self._only_some_reasons + + @property + def indirect_crl(self) -> bool: + return self._indirect_crl + + @property + def only_contains_attribute_certs(self) -> bool: + return self._only_contains_attribute_certs + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class MSCertificateTemplate(ExtensionType): + oid = ExtensionOID.MS_CERTIFICATE_TEMPLATE + + def __init__( + self, + template_id: ObjectIdentifier, + major_version: int | None, + minor_version: int | None, + ) -> None: + 
if not isinstance(template_id, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + self._template_id = template_id + if ( + major_version is not None and not isinstance(major_version, int) + ) or ( + minor_version is not None and not isinstance(minor_version, int) + ): + raise TypeError( + "major_version and minor_version must be integers or None" + ) + self._major_version = major_version + self._minor_version = minor_version + + @property + def template_id(self) -> ObjectIdentifier: + return self._template_id + + @property + def major_version(self) -> int | None: + return self._major_version + + @property + def minor_version(self) -> int | None: + return self._minor_version + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, MSCertificateTemplate): + return NotImplemented + + return ( + self.template_id == other.template_id + and self.major_version == other.major_version + and self.minor_version == other.minor_version + ) + + def __hash__(self) -> int: + return hash((self.template_id, self.major_version, self.minor_version)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class NamingAuthority: + def __init__( + self, + id: ObjectIdentifier | None, + url: str | None, + text: str | None, + ) -> None: + if id is not None and not isinstance(id, ObjectIdentifier): + raise TypeError("id must be an ObjectIdentifier") + + if url is not None and not isinstance(url, str): + raise TypeError("url must be a str") + + if text is not None and not isinstance(text, str): + raise TypeError("text must be a str") + + self._id = id + self._url = url + self._text = text + + @property + def id(self) -> ObjectIdentifier | None: + return self._id + + @property + def url(self) -> str | None: + return self._url + + @property + def text(self) -> str | None: + return self._text + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NamingAuthority): + return NotImplemented + + return ( + self.id == other.id + and self.url == other.url + and self.text == other.text + ) + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.url, + self.text, + ) + ) + + +class ProfessionInfo: + def __init__( + self, + naming_authority: NamingAuthority | None, + profession_items: Iterable[str], + profession_oids: Iterable[ObjectIdentifier] | None, + registration_number: str | None, + add_profession_info: bytes | None, + ) -> None: + if naming_authority is not None and not isinstance( + naming_authority, NamingAuthority + ): + raise TypeError("naming_authority must be a NamingAuthority") + + profession_items = list(profession_items) + if not all(isinstance(item, str) for item in profession_items): + raise TypeError( + "Every item in the profession_items list must be a str" + ) + + if profession_oids is not None: + profession_oids = list(profession_oids) + if not all( + isinstance(oid, ObjectIdentifier) for oid in profession_oids + ): + raise TypeError( + "Every item in the profession_oids list must be an " + "ObjectIdentifier" + ) + + if registration_number is not None and not isinstance( + registration_number, str + ): + raise TypeError("registration_number must be a str") + + if add_profession_info is not None and not isinstance( + add_profession_info, bytes + ): + raise TypeError("add_profession_info must be bytes") + + self._naming_authority = naming_authority + self._profession_items = profession_items + 
self._profession_oids = profession_oids + self._registration_number = registration_number + self._add_profession_info = add_profession_info + + @property + def naming_authority(self) -> NamingAuthority | None: + return self._naming_authority + + @property + def profession_items(self) -> list[str]: + return self._profession_items + + @property + def profession_oids(self) -> list[ObjectIdentifier] | None: + return self._profession_oids + + @property + def registration_number(self) -> str | None: + return self._registration_number + + @property + def add_profession_info(self) -> bytes | None: + return self._add_profession_info + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ProfessionInfo): + return NotImplemented + + return ( + self.naming_authority == other.naming_authority + and self.profession_items == other.profession_items + and self.profession_oids == other.profession_oids + and self.registration_number == other.registration_number + and self.add_profession_info == other.add_profession_info + ) + + def __hash__(self) -> int: + if self.profession_oids is not None: + profession_oids = tuple(self.profession_oids) + else: + profession_oids = None + return hash( + ( + self.naming_authority, + tuple(self.profession_items), + profession_oids, + self.registration_number, + self.add_profession_info, + ) + ) + + +class Admission: + def __init__( + self, + admission_authority: GeneralName | None, + naming_authority: NamingAuthority | None, + profession_infos: Iterable[ProfessionInfo], + ) -> None: + if admission_authority is not None and not isinstance( + admission_authority, GeneralName + ): + raise TypeError("admission_authority must be a GeneralName") + + if naming_authority is not None and not isinstance( + naming_authority, NamingAuthority + ): + raise TypeError("naming_authority must be a NamingAuthority") + + profession_infos = list(profession_infos) + if not all( + isinstance(info, ProfessionInfo) for info in profession_infos + ): + raise TypeError( + "Every item in the profession_infos list must be a " + "ProfessionInfo" + ) + + self._admission_authority = admission_authority + self._naming_authority = naming_authority + self._profession_infos = profession_infos + + @property + def admission_authority(self) -> GeneralName | None: + return self._admission_authority + + @property + def naming_authority(self) -> NamingAuthority | None: + return self._naming_authority + + @property + def profession_infos(self) -> list[ProfessionInfo]: + return self._profession_infos + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Admission): + return NotImplemented + + return ( + self.admission_authority == other.admission_authority + and self.naming_authority == other.naming_authority + and self.profession_infos == other.profession_infos + ) + + def __hash__(self) -> int: + return hash( + ( + self.admission_authority, + self.naming_authority, + tuple(self.profession_infos), + ) + ) + + +class Admissions(ExtensionType): + oid = ExtensionOID.ADMISSIONS + + def __init__( + self, + authority: GeneralName | None, + admissions: Iterable[Admission], + ) -> None: + if authority is not None and not isinstance(authority, GeneralName): + raise TypeError("authority must be a GeneralName") + + admissions = list(admissions) + if not all( + isinstance(admission, Admission) for admission in admissions + ): + raise TypeError( + "Every item in the contents_of_admissions list 
must be an " + "Admission" + ) + + self._authority = authority + self._admissions = admissions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_admissions") + + @property + def authority(self) -> GeneralName | None: + return self._authority + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Admissions): + return NotImplemented + + return ( + self.authority == other.authority + and self._admissions == other._admissions + ) + + def __hash__(self) -> int: + return hash((self.authority, tuple(self._admissions))) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class UnrecognizedExtension(ExtensionType): + def __init__(self, oid: ObjectIdentifier, value: bytes) -> None: + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + self._oid = oid + self._value = value + + @property + def oid(self) -> ObjectIdentifier: # type: ignore[override] + return self._oid + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UnrecognizedExtension): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __hash__(self) -> int: + return hash((self.oid, self.value)) + + def public_bytes(self) -> bytes: + return self.value diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/general_name.py b/.venv/lib/python3.9/site-packages/cryptography/x509/general_name.py new file mode 100644 index 0000000..672f287 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/x509/general_name.py @@ -0,0 +1,281 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import ipaddress +import typing +from email.utils import parseaddr + +from cryptography.x509.name import Name +from cryptography.x509.oid import ObjectIdentifier + +_IPAddressTypes = typing.Union[ + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + ipaddress.IPv6Network, +] + + +class UnsupportedGeneralNameType(Exception): + pass + + +class GeneralName(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def value(self) -> typing.Any: + """ + Return the value of the object + """ + + +class RFC822Name(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "RFC822Name values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna." + ) + else: + raise TypeError("value must be string") + + name, address = parseaddr(value) + if name or not address: + # parseaddr has found a name (e.g. Name ) or the entire + # value is an empty string. 
+ raise ValueError("Invalid rfc822name value") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> RFC822Name: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RFC822Name): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class DNSName(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "DNSName values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna." + ) + else: + raise TypeError("value must be string") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> DNSName: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DNSName): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class UniformResourceIdentifier(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "URI values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna." + ) + else: + raise TypeError("value must be string") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> UniformResourceIdentifier: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UniformResourceIdentifier): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class DirectoryName(GeneralName): + def __init__(self, value: Name) -> None: + if not isinstance(value, Name): + raise TypeError("value must be a Name") + + self._value = value + + @property + def value(self) -> Name: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DirectoryName): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class RegisteredID(GeneralName): + def __init__(self, value: ObjectIdentifier) -> None: + if not isinstance(value, ObjectIdentifier): + raise TypeError("value must be an ObjectIdentifier") + + self._value = value + + @property + def value(self) -> ObjectIdentifier: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RegisteredID): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class IPAddress(GeneralName): + def __init__(self, value: _IPAddressTypes) -> None: + if not isinstance( + value, + ( + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + 
ipaddress.IPv6Network, + ), + ): + raise TypeError( + "value must be an instance of ipaddress.IPv4Address, " + "ipaddress.IPv6Address, ipaddress.IPv4Network, or " + "ipaddress.IPv6Network" + ) + + self._value = value + + @property + def value(self) -> _IPAddressTypes: + return self._value + + def _packed(self) -> bytes: + if isinstance( + self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address) + ): + return self.value.packed + else: + return ( + self.value.network_address.packed + self.value.netmask.packed + ) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IPAddress): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class OtherName(GeneralName): + def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None: + if not isinstance(type_id, ObjectIdentifier): + raise TypeError("type_id must be an ObjectIdentifier") + if not isinstance(value, bytes): + raise TypeError("value must be a binary string") + + self._type_id = type_id + self._value = value + + @property + def type_id(self) -> ObjectIdentifier: + return self._type_id + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OtherName): + return NotImplemented + + return self.type_id == other.type_id and self.value == other.value + + def __hash__(self) -> int: + return hash((self.type_id, self.value)) diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/name.py b/.venv/lib/python3.9/site-packages/cryptography/x509/name.py new file mode 100644 index 0000000..685f921 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/x509/name.py @@ -0,0 +1,476 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
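The general-name classes above (DNSName, RFC822Name, UniformResourceIdentifier) accept only ASCII "A-label" strings and raise ValueError on raw unicode, so internationalized values have to be encoded before construction. A minimal sketch of that, assuming the third-party idna package is available (cryptography itself does not perform this conversion):

import ipaddress

import idna  # assumed helper library; not a dependency of cryptography itself

from cryptography.x509 import DNSName, IPAddress, RFC822Name

# Unicode hostnames must be converted to their A-label form first,
# otherwise DNSName() raises ValueError as shown above.
dns = DNSName(idna.encode("münchen.example").decode("ascii"))

# IPAddress wraps the stdlib ipaddress objects directly.
ip = IPAddress(ipaddress.ip_address("192.0.2.1"))

# RFC822Name rejects values with a display name or an empty address part.
email = RFC822Name("user@example.com")

print(dns.value, ip.value, email.value)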
+ +from __future__ import annotations + +import binascii +import re +import sys +import typing +import warnings +from collections.abc import Iterable, Iterator + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.x509.oid import NameOID, ObjectIdentifier + + +class _ASN1Type(utils.Enum): + BitString = 3 + OctetString = 4 + UTF8String = 12 + NumericString = 18 + PrintableString = 19 + T61String = 20 + IA5String = 22 + UTCTime = 23 + GeneralizedTime = 24 + VisibleString = 26 + UniversalString = 28 + BMPString = 30 + + +_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type} +_NAMEOID_DEFAULT_TYPE: dict[ObjectIdentifier, _ASN1Type] = { + NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString, + NameOID.DN_QUALIFIER: _ASN1Type.PrintableString, + NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String, + NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, +} + +# Type alias +_OidNameMap = typing.Mapping[ObjectIdentifier, str] +_NameOidMap = typing.Mapping[str, ObjectIdentifier] + +#: Short attribute names from RFC 4514: +#: https://tools.ietf.org/html/rfc4514#page-7 +_NAMEOID_TO_NAME: _OidNameMap = { + NameOID.COMMON_NAME: "CN", + NameOID.LOCALITY_NAME: "L", + NameOID.STATE_OR_PROVINCE_NAME: "ST", + NameOID.ORGANIZATION_NAME: "O", + NameOID.ORGANIZATIONAL_UNIT_NAME: "OU", + NameOID.COUNTRY_NAME: "C", + NameOID.STREET_ADDRESS: "STREET", + NameOID.DOMAIN_COMPONENT: "DC", + NameOID.USER_ID: "UID", +} +_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()} + +_NAMEOID_LENGTH_LIMIT = { + NameOID.COUNTRY_NAME: (2, 2), + NameOID.JURISDICTION_COUNTRY_NAME: (2, 2), + NameOID.COMMON_NAME: (1, 64), +} + + +def _escape_dn_value(val: str | bytes) -> str: + """Escape special characters in RFC4514 Distinguished Name value.""" + + if not val: + return "" + + # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character + # followed by the hexadecimal encoding of the octets. + if isinstance(val, bytes): + return "#" + binascii.hexlify(val).decode("utf8") + + # See https://tools.ietf.org/html/rfc4514#section-2.4 + val = val.replace("\\", "\\\\") + val = val.replace('"', '\\"') + val = val.replace("+", "\\+") + val = val.replace(",", "\\,") + val = val.replace(";", "\\;") + val = val.replace("<", "\\<") + val = val.replace(">", "\\>") + val = val.replace("\0", "\\00") + + if val[0] in ("#", " "): + val = "\\" + val + if val[-1] == " ": + val = val[:-1] + "\\ " + + return val + + +def _unescape_dn_value(val: str) -> str: + if not val: + return "" + + # See https://tools.ietf.org/html/rfc4514#section-3 + + # special = escaped / SPACE / SHARP / EQUALS + # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE + def sub(m): + val = m.group(1) + # Regular escape + if len(val) == 1: + return val + # Hex-value scape + return chr(int(val, 16)) + + return _RFC4514NameParser._PAIR_RE.sub(sub, val) + + +NameAttributeValueType = typing.TypeVar( + "NameAttributeValueType", + typing.Union[str, bytes], + str, + bytes, + covariant=True, +) + + +class NameAttribute(typing.Generic[NameAttributeValueType]): + def __init__( + self, + oid: ObjectIdentifier, + value: NameAttributeValueType, + _type: _ASN1Type | None = None, + *, + _validate: bool = True, + ) -> None: + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." 
+ ) + if _type == _ASN1Type.BitString: + if oid != NameOID.X500_UNIQUE_IDENTIFIER: + raise TypeError( + "oid must be X500_UNIQUE_IDENTIFIER for BitString type." + ) + if not isinstance(value, bytes): + raise TypeError("value must be bytes for BitString") + elif not isinstance(value, str): + raise TypeError("value argument must be a str") + + length_limits = _NAMEOID_LENGTH_LIMIT.get(oid) + if length_limits is not None: + min_length, max_length = length_limits + assert isinstance(value, str) + c_len = len(value.encode("utf8")) + if c_len < min_length or c_len > max_length: + msg = ( + f"Attribute's length must be >= {min_length} and " + f"<= {max_length}, but it was {c_len}" + ) + if _validate is True: + raise ValueError(msg) + else: + warnings.warn(msg, stacklevel=2) + + # The appropriate ASN1 string type varies by OID and is defined across + # multiple RFCs including 2459, 3280, and 5280. In general UTF8String + # is preferred (2459), but 3280 and 5280 specify several OIDs with + # alternate types. This means when we see the sentinel value we need + # to look up whether the OID has a non-UTF8 type. If it does, set it + # to that. Otherwise, UTF8! + if _type is None: + _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String) + + if not isinstance(_type, _ASN1Type): + raise TypeError("_type must be from the _ASN1Type enum") + + self._oid = oid + self._value: NameAttributeValueType = value + self._type: _ASN1Type = _type + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def value(self) -> NameAttributeValueType: + return self._value + + @property + def rfc4514_attribute_name(self) -> str: + """ + The short attribute name (for example "CN") if available, + otherwise the OID dotted string. + """ + return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string) + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + + Use short attribute name if available, otherwise fall back to OID + dotted string. + """ + attr_name = ( + attr_name_overrides.get(self.oid) if attr_name_overrides else None + ) + if attr_name is None: + attr_name = self.rfc4514_attribute_name + + return f"{attr_name}={_escape_dn_value(self.value)}" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NameAttribute): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __hash__(self) -> int: + return hash((self.oid, self.value)) + + def __repr__(self) -> str: + return f"" + + +class RelativeDistinguishedName: + def __init__(self, attributes: Iterable[NameAttribute]): + attributes = list(attributes) + if not attributes: + raise ValueError("a relative distinguished name cannot be empty") + if not all(isinstance(x, NameAttribute) for x in attributes): + raise TypeError("attributes must be an iterable of NameAttribute") + + # Keep list and frozenset to preserve attribute order where it matters + self._attributes = attributes + self._attribute_set = frozenset(attributes) + + if len(self._attribute_set) != len(attributes): + raise ValueError("duplicate attributes are not allowed") + + def get_attributes_for_oid( + self, + oid: ObjectIdentifier, + ) -> list[NameAttribute[str | bytes]]: + return [i for i in self if i.oid == oid] + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. 
+ + Within each RDN, attributes are joined by '+', although that is rarely + used in certificates. + """ + return "+".join( + attr.rfc4514_string(attr_name_overrides) + for attr in self._attributes + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RelativeDistinguishedName): + return NotImplemented + + return self._attribute_set == other._attribute_set + + def __hash__(self) -> int: + return hash(self._attribute_set) + + def __iter__(self) -> Iterator[NameAttribute]: + return iter(self._attributes) + + def __len__(self) -> int: + return len(self._attributes) + + def __repr__(self) -> str: + return f"" + + +class Name: + @typing.overload + def __init__(self, attributes: Iterable[NameAttribute]) -> None: ... + + @typing.overload + def __init__( + self, attributes: Iterable[RelativeDistinguishedName] + ) -> None: ... + + def __init__( + self, + attributes: Iterable[NameAttribute | RelativeDistinguishedName], + ) -> None: + attributes = list(attributes) + if all(isinstance(x, NameAttribute) for x in attributes): + self._attributes = [ + RelativeDistinguishedName([typing.cast(NameAttribute, x)]) + for x in attributes + ] + elif all(isinstance(x, RelativeDistinguishedName) for x in attributes): + self._attributes = typing.cast( + typing.List[RelativeDistinguishedName], attributes + ) + else: + raise TypeError( + "attributes must be a list of NameAttribute" + " or a list RelativeDistinguishedName" + ) + + @classmethod + def from_rfc4514_string( + cls, + data: str, + attr_name_overrides: _NameOidMap | None = None, + ) -> Name: + return _RFC4514NameParser(data, attr_name_overrides or {}).parse() + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + For example 'CN=foobar.com,O=Foo Corp,C=US' + + An X.509 name is a two-level structure: a list of sets of attributes. + Each list element is separated by ',' and within each list element, set + elements are separated by '+'. The latter is almost never used in + real world certificates. According to RFC4514 section 2.1 the + RDNSequence must be reversed when converting to string representation. + """ + return ",".join( + attr.rfc4514_string(attr_name_overrides) + for attr in reversed(self._attributes) + ) + + def get_attributes_for_oid( + self, + oid: ObjectIdentifier, + ) -> list[NameAttribute[str | bytes]]: + return [i for i in self if i.oid == oid] + + @property + def rdns(self) -> list[RelativeDistinguishedName]: + return self._attributes + + def public_bytes(self, backend: typing.Any = None) -> bytes: + return rust_x509.encode_name_bytes(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Name): + return NotImplemented + + return self._attributes == other._attributes + + def __hash__(self) -> int: + # TODO: this is relatively expensive, if this looks like a bottleneck + # for you, consider optimizing! 
+ return hash(tuple(self._attributes)) + + def __iter__(self) -> Iterator[NameAttribute]: + for rdn in self._attributes: + yield from rdn + + def __len__(self) -> int: + return sum(len(rdn) for rdn in self._attributes) + + def __repr__(self) -> str: + rdns = ",".join(attr.rfc4514_string() for attr in self._attributes) + return f"" + + +class _RFC4514NameParser: + _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+") + _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*") + + _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})" + _PAIR_RE = re.compile(_PAIR) + _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]" + _LEADCHAR = rf"{_LUTF1}|{_UTFMB}" + _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}" + _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}" + _STRING_RE = re.compile( + rf""" + ( + ({_LEADCHAR}|{_PAIR}) + ( + ({_STRINGCHAR}|{_PAIR})* + ({_TRAILCHAR}|{_PAIR}) + )? + )? + """, + re.VERBOSE, + ) + _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+") + + def __init__(self, data: str, attr_name_overrides: _NameOidMap) -> None: + self._data = data + self._idx = 0 + + self._attr_name_overrides = attr_name_overrides + + def _has_data(self) -> bool: + return self._idx < len(self._data) + + def _peek(self) -> str | None: + if self._has_data(): + return self._data[self._idx] + return None + + def _read_char(self, ch: str) -> None: + if self._peek() != ch: + raise ValueError + self._idx += 1 + + def _read_re(self, pat) -> str: + match = pat.match(self._data, pos=self._idx) + if match is None: + raise ValueError + val = match.group() + self._idx += len(val) + return val + + def parse(self) -> Name: + """ + Parses the `data` string and converts it to a Name. + + According to RFC4514 section 2.1 the RDNSequence must be + reversed when converting to string representation. So, when + we parse it, we need to reverse again to get the RDNs on the + correct order. + """ + + if not self._has_data(): + return Name([]) + + rdns = [self._parse_rdn()] + + while self._has_data(): + self._read_char(",") + rdns.append(self._parse_rdn()) + + return Name(reversed(rdns)) + + def _parse_rdn(self) -> RelativeDistinguishedName: + nas = [self._parse_na()] + while self._peek() == "+": + self._read_char("+") + nas.append(self._parse_na()) + + return RelativeDistinguishedName(nas) + + def _parse_na(self) -> NameAttribute: + try: + oid_value = self._read_re(self._OID_RE) + except ValueError: + name = self._read_re(self._DESCR_RE) + oid = self._attr_name_overrides.get( + name, _NAME_TO_NAMEOID.get(name) + ) + if oid is None: + raise ValueError + else: + oid = ObjectIdentifier(oid_value) + + self._read_char("=") + if self._peek() == "#": + value = self._read_re(self._HEXSTRING_RE) + value = binascii.unhexlify(value[1:]).decode() + else: + raw_value = self._read_re(self._STRING_RE) + value = _unescape_dn_value(raw_value) + + return NameAttribute(oid, value) diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/ocsp.py b/.venv/lib/python3.9/site-packages/cryptography/x509/ocsp.py new file mode 100644 index 0000000..f61ed80 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/x509/ocsp.py @@ -0,0 +1,379 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
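The rfc4514_string and from_rfc4514_string docstrings above describe the RDN-reversal rule from RFC 4514 section 2.1: the RDN sequence is reversed when serializing, and reversed again when parsing. A short round-trip sketch using the docstring's own example values (the attribute contents are illustrative only):

from cryptography.x509 import Name, NameAttribute
from cryptography.x509.oid import NameOID

# Build the name with the least-significant RDN (C) first in the list.
name = Name(
    [
        NameAttribute(NameOID.COUNTRY_NAME, "US"),
        NameAttribute(NameOID.ORGANIZATION_NAME, "Foo Corp"),
        NameAttribute(NameOID.COMMON_NAME, "foobar.com"),
    ]
)

# Serialization reverses the RDN sequence, so CN comes first in the string.
s = name.rfc4514_string()
assert s == "CN=foobar.com,O=Foo Corp,C=US"

# Parsing reverses again, yielding an equal Name object.
assert Name.from_rfc4514_string(s) == name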
+ +from __future__ import annotations + +import datetime +from collections.abc import Iterable + +from cryptography import utils, x509 +from cryptography.hazmat.bindings._rust import ocsp +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPrivateKeyTypes, +) +from cryptography.x509.base import _reject_duplicate_extension + + +class OCSPResponderEncoding(utils.Enum): + HASH = "By Hash" + NAME = "By Name" + + +class OCSPResponseStatus(utils.Enum): + SUCCESSFUL = 0 + MALFORMED_REQUEST = 1 + INTERNAL_ERROR = 2 + TRY_LATER = 3 + SIG_REQUIRED = 5 + UNAUTHORIZED = 6 + + +_ALLOWED_HASHES = ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +) + + +def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None: + if not isinstance(algorithm, _ALLOWED_HASHES): + raise ValueError( + "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512" + ) + + +class OCSPCertStatus(utils.Enum): + GOOD = 0 + REVOKED = 1 + UNKNOWN = 2 + + +class _SingleResponse: + def __init__( + self, + resp: tuple[x509.Certificate, x509.Certificate] | None, + resp_hash: tuple[bytes, bytes, int] | None, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ): + _verify_algorithm(algorithm) + if not isinstance(this_update, datetime.datetime): + raise TypeError("this_update must be a datetime object") + if next_update is not None and not isinstance( + next_update, datetime.datetime + ): + raise TypeError("next_update must be a datetime object or None") + + self._resp = resp + self._resp_hash = resp_hash + self._algorithm = algorithm + self._this_update = this_update + self._next_update = next_update + + if not isinstance(cert_status, OCSPCertStatus): + raise TypeError( + "cert_status must be an item from the OCSPCertStatus enum" + ) + if cert_status is not OCSPCertStatus.REVOKED: + if revocation_time is not None: + raise ValueError( + "revocation_time can only be provided if the certificate " + "is revoked" + ) + if revocation_reason is not None: + raise ValueError( + "revocation_reason can only be provided if the certificate" + " is revoked" + ) + else: + if not isinstance(revocation_time, datetime.datetime): + raise TypeError("revocation_time must be a datetime object") + + if revocation_reason is not None and not isinstance( + revocation_reason, x509.ReasonFlags + ): + raise TypeError( + "revocation_reason must be an item from the ReasonFlags " + "enum or None" + ) + + self._cert_status = cert_status + self._revocation_time = revocation_time + self._revocation_reason = revocation_reason + + +OCSPRequest = ocsp.OCSPRequest +OCSPResponse = ocsp.OCSPResponse +OCSPSingleResponse = ocsp.OCSPSingleResponse + + +class OCSPRequestBuilder: + def __init__( + self, + request: tuple[ + x509.Certificate, x509.Certificate, hashes.HashAlgorithm + ] + | None = None, + request_hash: tuple[bytes, bytes, int, hashes.HashAlgorithm] + | None = None, + extensions: list[x509.Extension[x509.ExtensionType]] = [], + ) -> None: + self._request = request + self._request_hash = request_hash + self._extensions = extensions + + def add_certificate( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + ) -> OCSPRequestBuilder: + if self._request is not None or self._request_hash is not None: + raise ValueError("Only one 
certificate can be added to a request") + + _verify_algorithm(algorithm) + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + return OCSPRequestBuilder( + (cert, issuer, algorithm), self._request_hash, self._extensions + ) + + def add_certificate_by_hash( + self, + issuer_name_hash: bytes, + issuer_key_hash: bytes, + serial_number: int, + algorithm: hashes.HashAlgorithm, + ) -> OCSPRequestBuilder: + if self._request is not None or self._request_hash is not None: + raise ValueError("Only one certificate can be added to a request") + + if not isinstance(serial_number, int): + raise TypeError("serial_number must be an integer") + + _verify_algorithm(algorithm) + utils._check_bytes("issuer_name_hash", issuer_name_hash) + utils._check_bytes("issuer_key_hash", issuer_key_hash) + if algorithm.digest_size != len( + issuer_name_hash + ) or algorithm.digest_size != len(issuer_key_hash): + raise ValueError( + "issuer_name_hash and issuer_key_hash must be the same length " + "as the digest size of the algorithm" + ) + + return OCSPRequestBuilder( + self._request, + (issuer_name_hash, issuer_key_hash, serial_number, algorithm), + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> OCSPRequestBuilder: + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPRequestBuilder( + self._request, self._request_hash, [*self._extensions, extension] + ) + + def build(self) -> OCSPRequest: + if self._request is None and self._request_hash is None: + raise ValueError("You must add a certificate before building") + + return ocsp.create_ocsp_request(self) + + +class OCSPResponseBuilder: + def __init__( + self, + response: _SingleResponse | None = None, + responder_id: tuple[x509.Certificate, OCSPResponderEncoding] + | None = None, + certs: list[x509.Certificate] | None = None, + extensions: list[x509.Extension[x509.ExtensionType]] = [], + ): + self._response = response + self._responder_id = responder_id + self._certs = certs + self._extensions = extensions + + def add_response( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ) -> OCSPResponseBuilder: + if self._response is not None: + raise ValueError("Only one response per OCSPResponse.") + + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + singleresp = _SingleResponse( + (cert, issuer), + None, + algorithm, + cert_status, + this_update, + next_update, + revocation_time, + revocation_reason, + ) + return OCSPResponseBuilder( + singleresp, + self._responder_id, + self._certs, + self._extensions, + ) + + def add_response_by_hash( + self, + issuer_name_hash: bytes, + issuer_key_hash: bytes, + serial_number: int, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ) -> OCSPResponseBuilder: + if self._response 
is not None: + raise ValueError("Only one response per OCSPResponse.") + + if not isinstance(serial_number, int): + raise TypeError("serial_number must be an integer") + + utils._check_bytes("issuer_name_hash", issuer_name_hash) + utils._check_bytes("issuer_key_hash", issuer_key_hash) + _verify_algorithm(algorithm) + if algorithm.digest_size != len( + issuer_name_hash + ) or algorithm.digest_size != len(issuer_key_hash): + raise ValueError( + "issuer_name_hash and issuer_key_hash must be the same length " + "as the digest size of the algorithm" + ) + + singleresp = _SingleResponse( + None, + (issuer_name_hash, issuer_key_hash, serial_number), + algorithm, + cert_status, + this_update, + next_update, + revocation_time, + revocation_reason, + ) + return OCSPResponseBuilder( + singleresp, + self._responder_id, + self._certs, + self._extensions, + ) + + def responder_id( + self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate + ) -> OCSPResponseBuilder: + if self._responder_id is not None: + raise ValueError("responder_id can only be set once") + if not isinstance(responder_cert, x509.Certificate): + raise TypeError("responder_cert must be a Certificate") + if not isinstance(encoding, OCSPResponderEncoding): + raise TypeError( + "encoding must be an element from OCSPResponderEncoding" + ) + + return OCSPResponseBuilder( + self._response, + (responder_cert, encoding), + self._certs, + self._extensions, + ) + + def certificates( + self, certs: Iterable[x509.Certificate] + ) -> OCSPResponseBuilder: + if self._certs is not None: + raise ValueError("certificates may only be set once") + certs = list(certs) + if len(certs) == 0: + raise ValueError("certs must not be an empty list") + if not all(isinstance(x, x509.Certificate) for x in certs): + raise TypeError("certs must be a list of Certificates") + return OCSPResponseBuilder( + self._response, + self._responder_id, + certs, + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> OCSPResponseBuilder: + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPResponseBuilder( + self._response, + self._responder_id, + self._certs, + [*self._extensions, extension], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: hashes.HashAlgorithm | None, + ) -> OCSPResponse: + if self._response is None: + raise ValueError("You must add a response before signing") + if self._responder_id is None: + raise ValueError("You must add a responder_id before signing") + + return ocsp.create_ocsp_response( + OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm + ) + + @classmethod + def build_unsuccessful( + cls, response_status: OCSPResponseStatus + ) -> OCSPResponse: + if not isinstance(response_status, OCSPResponseStatus): + raise TypeError( + "response_status must be an item from OCSPResponseStatus" + ) + if response_status is OCSPResponseStatus.SUCCESSFUL: + raise ValueError("response_status cannot be SUCCESSFUL") + + return ocsp.create_ocsp_response(response_status, None, None, None) + + +load_der_ocsp_request = ocsp.load_der_ocsp_request +load_der_ocsp_response = ocsp.load_der_ocsp_response diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/oid.py b/.venv/lib/python3.9/site-packages/cryptography/x509/oid.py new file mode 100644 index 0000000..520fc7a --- /dev/null 
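OCSPRequestBuilder above follows an immutable-builder pattern: each add_* method returns a new builder, and only one certificate may be added per request. A hedged usage sketch, assuming "cert.pem" and "issuer.pem" are hypothetical local files containing an end-entity certificate and its issuer:

from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.x509 import ocsp

# Hypothetical input files; any certificate plus its issuing CA certificate works.
with open("cert.pem", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read())
with open("issuer.pem", "rb") as f:
    issuer = x509.load_pem_x509_certificate(f.read())

# Each builder call returns a new builder; build() requires exactly one certificate.
builder = ocsp.OCSPRequestBuilder()
builder = builder.add_certificate(cert, issuer, hashes.SHA256())
req = builder.build()

# The DER encoding is what gets sent to the CA's OCSP responder.
der = req.public_bytes(serialization.Encoding.DER)
print(len(der))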
+++ b/.venv/lib/python3.9/site-packages/cryptography/x509/oid.py @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat._oid import ( + AttributeOID, + AuthorityInformationAccessOID, + CertificatePoliciesOID, + CRLEntryExtensionOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + OCSPExtensionOID, + OtherNameFormOID, + PublicKeyAlgorithmOID, + SignatureAlgorithmOID, + SubjectInformationAccessOID, +) + +__all__ = [ + "AttributeOID", + "AuthorityInformationAccessOID", + "CRLEntryExtensionOID", + "CertificatePoliciesOID", + "ExtendedKeyUsageOID", + "ExtensionOID", + "NameOID", + "OCSPExtensionOID", + "ObjectIdentifier", + "OtherNameFormOID", + "PublicKeyAlgorithmOID", + "SignatureAlgorithmOID", + "SubjectInformationAccessOID", +] diff --git a/.venv/lib/python3.9/site-packages/cryptography/x509/verification.py b/.venv/lib/python3.9/site-packages/cryptography/x509/verification.py new file mode 100644 index 0000000..2db4324 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/cryptography/x509/verification.py @@ -0,0 +1,34 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.x509.general_name import DNSName, IPAddress + +__all__ = [ + "ClientVerifier", + "Criticality", + "ExtensionPolicy", + "Policy", + "PolicyBuilder", + "ServerVerifier", + "Store", + "Subject", + "VerificationError", + "VerifiedClient", +] + +Store = rust_x509.Store +Subject = typing.Union[DNSName, IPAddress] +VerifiedClient = rust_x509.VerifiedClient +ClientVerifier = rust_x509.ClientVerifier +ServerVerifier = rust_x509.ServerVerifier +PolicyBuilder = rust_x509.PolicyBuilder +Policy = rust_x509.Policy +ExtensionPolicy = rust_x509.ExtensionPolicy +Criticality = rust_x509.Criticality +VerificationError = rust_x509.VerificationError diff --git a/.venv/lib/python3.9/site-packages/distutils-precedence.pth b/.venv/lib/python3.9/site-packages/distutils-precedence.pth new file mode 100644 index 0000000..6de4198 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/distutils-precedence.pth @@ -0,0 +1 @@ +import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/LICENSE b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/LICENSE new file mode 100644 index 0000000..10e0dce --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2020 Jeff Forcier. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/METADATA b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/METADATA new file mode 100644 index 0000000..96599c1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/METADATA @@ -0,0 +1,77 @@ +Metadata-Version: 2.1 +Name: invoke +Version: 2.2.1 +Summary: Pythonic task execution +Home-page: https://pyinvoke.org +Author: Jeff Forcier +Author-email: jeff@bitprophet.org +License: BSD +Project-URL: Docs, https://docs.pyinvoke.org +Project-URL: Source, https://github.com/pyinvoke/invoke +Project-URL: Issues, https://github.com/pyinvoke/invoke/issues +Project-URL: Changelog, https://www.pyinvoke.org/changelog.html +Project-URL: CI, https://app.circleci.com/pipelines/github/pyinvoke/invoke +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: POSIX +Classifier: Operating System :: Unix +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Build Tools +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Software Distribution +Classifier: Topic :: System :: Systems Administration +Requires-Python: >=3.6 +License-File: LICENSE + + +|version| |python| |license| |ci| |coverage| + +.. |version| image:: https://img.shields.io/pypi/v/invoke + :target: https://pypi.org/project/invoke/ + :alt: PyPI - Package Version +.. 
|python| image:: https://img.shields.io/pypi/pyversions/invoke + :target: https://pypi.org/project/invoke/ + :alt: PyPI - Python Version +.. |license| image:: https://img.shields.io/pypi/l/invoke + :target: https://github.com/pyinvoke/invoke/blob/main/LICENSE + :alt: PyPI - License +.. |ci| image:: https://img.shields.io/circleci/build/github/pyinvoke/invoke/main + :target: https://app.circleci.com/pipelines/github/pyinvoke/invoke + :alt: CircleCI +.. |coverage| image:: https://img.shields.io/codecov/c/gh/pyinvoke/invoke + :target: https://app.codecov.io/gh/pyinvoke/invoke + :alt: Codecov + +Welcome to Invoke! +================== + +Invoke is a Python (2.7 and 3.4+) library for managing shell-oriented +subprocesses and organizing executable Python code into CLI-invokable tasks. It +draws inspiration from various sources (``make``/``rake``, Fabric 1.x, etc) to +arrive at a powerful & clean feature set. + +To find out what's new in this version of Invoke, please see `the changelog +`_. + +The project maintainer keeps a `roadmap +`_ on his website. + + +For a high level introduction, including example code, please see `our main +project website `_; or for detailed API docs, see `the +versioned API website `_. diff --git a/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/RECORD b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/RECORD new file mode 100644 index 0000000..ebfddcb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/RECORD @@ -0,0 +1,109 @@ +../../../bin/inv,sha256=nv9q7Cb2MMh69l4dmSXKW89XY3_icPJHcd7P2gh-jYo,261 +../../../bin/invoke,sha256=nv9q7Cb2MMh69l4dmSXKW89XY3_icPJHcd7P2gh-jYo,261 +invoke-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +invoke-2.2.1.dist-info/LICENSE,sha256=eSL5f4lvHRYeHr9HCD4wSiNjg2GyVcaQK15PNO7aDa0,1314 +invoke-2.2.1.dist-info/METADATA,sha256=_T4J728FxhvzvMX4Aw6nXnbMFZHU7VoPnoTA45HDVfw,3270 +invoke-2.2.1.dist-info/RECORD,, +invoke-2.2.1.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91 +invoke-2.2.1.dist-info/entry_points.txt,sha256=fz7lDPipw_V1nnuk41CNxa739dBpJ8TO9cpKuUXVDPs,81 +invoke-2.2.1.dist-info/top_level.txt,sha256=ZlTlAVMd8lzn3sXyAhCRi0LNslCasA7rlucRHq9w79w,7 +invoke/__init__.py,sha256=XBXrLV9I81Nq6ELEoN_XAE4vTNmM6Big-1eWt1dQH3k,2229 +invoke/__main__.py,sha256=nwyePAl8dedcetg1CiEQNC0-CHESN0DJy5tK2YEqGeo,47 +invoke/__pycache__/__init__.cpython-39.pyc,, +invoke/__pycache__/__main__.cpython-39.pyc,, +invoke/__pycache__/_version.cpython-39.pyc,, +invoke/__pycache__/collection.cpython-39.pyc,, +invoke/__pycache__/config.cpython-39.pyc,, +invoke/__pycache__/context.cpython-39.pyc,, +invoke/__pycache__/env.cpython-39.pyc,, +invoke/__pycache__/exceptions.cpython-39.pyc,, +invoke/__pycache__/executor.cpython-39.pyc,, +invoke/__pycache__/loader.cpython-39.pyc,, +invoke/__pycache__/main.cpython-39.pyc,, +invoke/__pycache__/program.cpython-39.pyc,, +invoke/__pycache__/runners.cpython-39.pyc,, +invoke/__pycache__/tasks.cpython-39.pyc,, +invoke/__pycache__/terminals.cpython-39.pyc,, +invoke/__pycache__/util.cpython-39.pyc,, +invoke/__pycache__/watchers.cpython-39.pyc,, +invoke/_version.py,sha256=woNEa1C-mSdvI4h8gKEotsc-bB7rAF7WZDpFO8oV7C0,80 +invoke/collection.py,sha256=0Qv9bnfKeUbkAaTLINxrzG9MBrhjwrhd7Rv6v6WgdKI,23060 +invoke/completion/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +invoke/completion/__pycache__/__init__.cpython-39.pyc,, +invoke/completion/__pycache__/complete.cpython-39.pyc,, 
+invoke/completion/bash.completion,sha256=OvJRVRvoW7aMDf5qMKidi8x-ND-KkIM5JS5-6iJ3eHE,1356 +invoke/completion/complete.py,sha256=j3Tv2oNrKyuQHPi5tavAUYN5vo8v4vABNl_T9t_0YOY,5222 +invoke/completion/fish.completion,sha256=G28g1dpA-Q9-Q6KmlcGd_6XMw8h7ExHjAJ6ny04ufr8,382 +invoke/completion/zsh.completion,sha256=YiekKS7ZDCIqLmlC31Ht9H95S2n8aNsfnn4bjBcFRLQ,1429 +invoke/config.py,sha256=NGBAmHWoUI_PHBxTAZNtRt2X4P09ouWRRzBdVj3MTac,49653 +invoke/context.py,sha256=k1KbmT2hDk846Su4wUuElZ4nXIYXYjqeIxuS0g_0_lc,25486 +invoke/env.py,sha256=T_ejssb-lmdZwPHFjJGH62KnhQKim-ziiWBxM-8ejW8,4394 +invoke/exceptions.py,sha256=e5vwp9cJS8teQK30WLY4ICKU8ateSUexU6BuFnEllDA,12227 +invoke/executor.py,sha256=T_iQ6uNTC2Z_LB7WWVxV4ab0jvYVKoKO_3hCFQ3FBlA,8855 +invoke/loader.py,sha256=C5dtubZRfjEdaF8WUi3blZjQxv8yGCtvipXrMMMSG4Y,6005 +invoke/main.py,sha256=njYYo2anK4krAE4mCV9Z9I1bRJFaOCp-UBL1vRE-Wq4,235 +invoke/parser/__init__.py,sha256=HpSB_sx2aZrCOUy5Cl_LMyfCBuGwrB9-d1OS4821Dzs,181 +invoke/parser/__pycache__/__init__.cpython-39.pyc,, +invoke/parser/__pycache__/argument.cpython-39.pyc,, +invoke/parser/__pycache__/context.cpython-39.pyc,, +invoke/parser/__pycache__/parser.cpython-39.pyc,, +invoke/parser/argument.py,sha256=eyIGaOtjyEz2M2BMiIz0cvn8J6zinpUhgxoXexJzHpk,6045 +invoke/parser/context.py,sha256=NaqvcN4E9W-7Ems48tJzIedBqcrZnbR41TA_rSE5JGo,9815 +invoke/parser/parser.py,sha256=g99NcgzHGfEjsNzwyFmAo_MRhqlGf82SXXBzOZXr6Rw,19809 +invoke/program.py,sha256=oBrtCjtAdycz3UDCJM5vTrdHE37SL7UPw-AzzcwWsRU,38177 +invoke/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +invoke/runners.py,sha256=4WJHoj8UFggD6e-F12hFsTiHTHBO5fHEZZAIduFNWNo,65509 +invoke/tasks.py,sha256=FBj_EStMXfQ0Y9Nw8gPctRxwKp4Lya8QdnrDklVVJVk,19946 +invoke/terminals.py,sha256=COszJimzGyA7FcPo8kgOmVDl4v5kciU2w3-Ts3LOo4g,8148 +invoke/util.py,sha256=NKjJoZA4Tb8VzAvr58gIAWvQBaTFDKRg3bjRSxdeMcs,10018 +invoke/vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +invoke/vendor/__pycache__/__init__.cpython-39.pyc,, +invoke/vendor/fluidity/__init__.py,sha256=f5SF3sMYo71808nX-9bJt0pb5-BCZ2srikX1aCqczpw,196 +invoke/vendor/fluidity/__pycache__/__init__.cpython-39.pyc,, +invoke/vendor/fluidity/__pycache__/backwardscompat.cpython-39.pyc,, +invoke/vendor/fluidity/__pycache__/machine.cpython-39.pyc,, +invoke/vendor/fluidity/backwardscompat.py,sha256=P_qC1dIhIHq6LQYDGxWed-V8o3viMcPOYv4mx0NY52U,135 +invoke/vendor/fluidity/machine.py,sha256=9ZhzmAg-3Q_5jCWHOih1WtoTMAKhkgT2EePkXW-258M,8686 +invoke/vendor/lexicon/__init__.py,sha256=iFssP2WfLyW5pmp4EDFvd_63zvAyZ_7YKh2nrfbvlHY,1133 +invoke/vendor/lexicon/__pycache__/__init__.cpython-39.pyc,, +invoke/vendor/lexicon/__pycache__/_version.cpython-39.pyc,, +invoke/vendor/lexicon/__pycache__/alias_dict.cpython-39.pyc,, +invoke/vendor/lexicon/__pycache__/attribute_dict.cpython-39.pyc,, +invoke/vendor/lexicon/_version.py,sha256=GFgEreRHgT-8UlwM974VYEId1Wj19fqMAk-hpObAjeI,80 +invoke/vendor/lexicon/alias_dict.py,sha256=gsSlVPy3wt5HGJSzjhdBiMVcPnIuVLU3Sl1VbybVs5w,3223 +invoke/vendor/lexicon/attribute_dict.py,sha256=j2myombp3ZH3fTOy4RhVAyZXrP_-1iwVqdenEvY2u-Q,407 +invoke/vendor/yaml/__init__.py,sha256=gfp2CbRVhzknghkiiJD2l6Z0pI-mv_iZHPSJ4aj0-nY,13170 +invoke/vendor/yaml/__pycache__/__init__.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/composer.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/constructor.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/cyaml.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/dumper.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/emitter.cpython-39.pyc,, 
+invoke/vendor/yaml/__pycache__/error.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/events.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/loader.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/nodes.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/parser.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/reader.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/representer.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/resolver.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/scanner.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/serializer.cpython-39.pyc,, +invoke/vendor/yaml/__pycache__/tokens.cpython-39.pyc,, +invoke/vendor/yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 +invoke/vendor/yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 +invoke/vendor/yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 +invoke/vendor/yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 +invoke/vendor/yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 +invoke/vendor/yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 +invoke/vendor/yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 +invoke/vendor/yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 +invoke/vendor/yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 +invoke/vendor/yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 +invoke/vendor/yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 +invoke/vendor/yaml/representer.py,sha256=82UM3ZxUQKqsKAF4ltWOxCS6jGPIFtXpGs7mvqyv4Xs,14184 +invoke/vendor/yaml/resolver.py,sha256=Z1W8AOMA6Proy4gIO2OhUO4IPS_bFNAl0Ca3rwChpPg,8999 +invoke/vendor/yaml/scanner.py,sha256=KeQIKGNlSyPE8QDwionHxy9CgbqE5teJEz05FR9-nAg,51277 +invoke/vendor/yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 +invoke/vendor/yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 +invoke/watchers.py,sha256=E8CB8ikiXFw-15snsFnbZjwq-exFMqNqFOqkGgcsqLs,5097 diff --git a/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/WHEEL new file mode 100644 index 0000000..1f64615 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.3.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/entry_points.txt b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/entry_points.txt new file mode 100644 index 0000000..56faa37 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +inv = invoke.main:program.run +invoke = invoke.main:program.run diff --git a/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/top_level.txt new file mode 100644 index 0000000..460820d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke-2.2.1.dist-info/top_level.txt @@ -0,0 +1 @@ +invoke diff --git a/.venv/lib/python3.9/site-packages/invoke/__init__.py b/.venv/lib/python3.9/site-packages/invoke/__init__.py new file mode 100644 index 0000000..b707267 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/__init__.py @@ -0,0 +1,70 @@ +from typing import Any, Optional + +from ._version import __version_info__, __version__ # noqa +from 
.collection import Collection # noqa +from .config import Config # noqa +from .context import Context, MockContext # noqa +from .exceptions import ( # noqa + AmbiguousEnvVar, + AuthFailure, + CollectionNotFound, + Exit, + ParseError, + PlatformError, + ResponseNotAccepted, + SubprocessPipeError, + ThreadException, + UncastableEnvVar, + UnexpectedExit, + UnknownFileType, + UnpicklableConfigMember, + WatcherError, + CommandTimedOut, +) +from .executor import Executor # noqa +from .loader import FilesystemLoader # noqa +from .parser import Argument, Parser, ParserContext, ParseResult # noqa +from .program import Program # noqa +from .runners import Runner, Local, Failure, Result, Promise # noqa +from .tasks import task, call, Call, Task # noqa +from .terminals import pty_size # noqa +from .watchers import FailingResponder, Responder, StreamWatcher # noqa + + +def run(command: str, **kwargs: Any) -> Optional[Result]: + """ + Run ``command`` in a subprocess and return a `.Result` object. + + See `.Runner.run` for API details. + + .. note:: + This function is a convenience wrapper around Invoke's `.Context` and + `.Runner` APIs. + + Specifically, it creates an anonymous `.Context` instance and calls its + `~.Context.run` method, which in turn defaults to using a `.Local` + runner subclass for command execution. + + .. versionadded:: 1.0 + """ + return Context().run(command, **kwargs) + + +def sudo(command: str, **kwargs: Any) -> Optional[Result]: + """ + Run ``command`` in a ``sudo`` subprocess and return a `.Result` object. + + See `.Context.sudo` for API details, such as the ``password`` kwarg. + + .. note:: + This function is a convenience wrapper around Invoke's `.Context` and + `.Runner` APIs. + + Specifically, it creates an anonymous `.Context` instance and calls its + `~.Context.sudo` method, which in turn defaults to using a `.Local` + runner subclass for command execution (plus sudo-related bits & + pieces). + + .. 
versionadded:: 1.4 + """ + return Context().sudo(command, **kwargs) diff --git a/.venv/lib/python3.9/site-packages/invoke/__main__.py b/.venv/lib/python3.9/site-packages/invoke/__main__.py new file mode 100644 index 0000000..2c8118c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/__main__.py @@ -0,0 +1,3 @@ +from invoke.main import program + +program.run() diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..873521f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/__main__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/__main__.cpython-39.pyc new file mode 100644 index 0000000..4b9e9ff Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/__main__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/_version.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/_version.cpython-39.pyc new file mode 100644 index 0000000..0aeebdd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/_version.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/collection.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/collection.cpython-39.pyc new file mode 100644 index 0000000..84b1b5e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/collection.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/config.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/config.cpython-39.pyc new file mode 100644 index 0000000..5d517a4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/config.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/context.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/context.cpython-39.pyc new file mode 100644 index 0000000..b8a35a7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/context.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/env.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/env.cpython-39.pyc new file mode 100644 index 0000000..f0cab10 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/env.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/exceptions.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..897f98d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/exceptions.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/executor.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/executor.cpython-39.pyc new file mode 100644 index 0000000..2510632 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/executor.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/loader.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/loader.cpython-39.pyc new file mode 100644 index 0000000..c7a5815 Binary files /dev/null and 
b/.venv/lib/python3.9/site-packages/invoke/__pycache__/loader.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/main.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/main.cpython-39.pyc new file mode 100644 index 0000000..53369d3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/main.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/program.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/program.cpython-39.pyc new file mode 100644 index 0000000..af14d42 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/program.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/runners.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/runners.cpython-39.pyc new file mode 100644 index 0000000..533ed38 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/runners.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/tasks.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/tasks.cpython-39.pyc new file mode 100644 index 0000000..482bb8f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/tasks.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/terminals.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/terminals.cpython-39.pyc new file mode 100644 index 0000000..9e392f4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/terminals.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/util.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/util.cpython-39.pyc new file mode 100644 index 0000000..1561721 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/util.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/__pycache__/watchers.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/__pycache__/watchers.cpython-39.pyc new file mode 100644 index 0000000..0824316 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/__pycache__/watchers.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/_version.py b/.venv/lib/python3.9/site-packages/invoke/_version.py new file mode 100644 index 0000000..14efac7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/_version.py @@ -0,0 +1,2 @@ +__version_info__ = (2, 2, 1) +__version__ = ".".join(map(str, __version_info__)) diff --git a/.venv/lib/python3.9/site-packages/invoke/collection.py b/.venv/lib/python3.9/site-packages/invoke/collection.py new file mode 100644 index 0000000..23dcff9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/collection.py @@ -0,0 +1,608 @@ +import copy +from types import ModuleType +from typing import Any, Callable, Dict, List, Optional, Tuple + +from .util import Lexicon, helpline + +from .config import merge_dicts, copy_dict +from .parser import Context as ParserContext +from .tasks import Task + + +class Collection: + """ + A collection of executable tasks. See :doc:`/concepts/namespaces`. + + .. versionadded:: 1.0 + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """ + Create a new task collection/namespace. 
+ + `.Collection` offers a set of methods for building a collection of + tasks from scratch, plus a convenient constructor wrapping said API. + + In either case: + + * The first positional argument may be a string, which (if given) is + used as the collection's default name when performing namespace + lookups; + * A ``loaded_from`` keyword argument may be given, which sets metadata + indicating the filesystem path the collection was loaded from. This + is used as a guide when loading per-project :ref:`configuration files + `. + * An ``auto_dash_names`` kwarg may be given, controlling whether task + and collection names have underscores turned to dashes in most cases; + it defaults to ``True`` but may be set to ``False`` to disable. + + The CLI machinery will pass in the value of the + ``tasks.auto_dash_names`` config value to this kwarg. + + **The method approach** + + May initialize with no arguments and use methods (e.g. + `.add_task`/`.add_collection`) to insert objects:: + + c = Collection() + c.add_task(some_task) + + If an initial string argument is given, it is used as the default name + for this collection, should it be inserted into another collection as a + sub-namespace:: + + docs = Collection('docs') + docs.add_task(doc_task) + ns = Collection() + ns.add_task(top_level_task) + ns.add_collection(docs) + # Valid identifiers are now 'top_level_task' and 'docs.doc_task' + # (assuming the task objects were actually named the same as the + # variables we're using :)) + + For details, see the API docs for the rest of the class. + + **The constructor approach** + + All ``*args`` given to `.Collection` (besides the abovementioned + optional positional 'name' argument and ``loaded_from`` kwarg) are + expected to be `.Task` or `.Collection` instances which will be passed + to `.add_task`/`.add_collection` as appropriate. Module objects are + also valid (as they are for `.add_collection`). For example, the below + snippet results in the same two task identifiers as the one above:: + + ns = Collection(top_level_task, Collection('docs', doc_task)) + + If any ``**kwargs`` are given, the keywords are used as the initial + name arguments for the respective values:: + + ns = Collection( + top_level_task=some_other_task, + docs=Collection(doc_task) + ) + + That's exactly equivalent to:: + + docs = Collection(doc_task) + ns = Collection() + ns.add_task(some_other_task, 'top_level_task') + ns.add_collection(docs, 'docs') + + See individual methods' API docs for details. 
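As a rough usage sketch of the two construction styles described above (not taken from the vendored invoke sources; the task names `build` and `publish` and the echoed commands are hypothetical)::

    from invoke import task, Collection

    @task
    def build(c):
        c.run("echo building")          # illustrative command only

    @task
    def publish(c):
        c.run("echo publishing")        # illustrative command only

    # Method approach: start empty, then add tasks and sub-collections.
    docs = Collection("docs")
    docs.add_task(publish)
    ns = Collection()
    ns.add_task(build)
    ns.add_collection(docs)
    # Invocable identifiers are now 'build' and 'docs.publish'.

    # Constructor approach: the same namespace built in one expression.
    ns = Collection(build, docs=Collection(publish))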
+ """ + # Initialize + self.tasks = Lexicon() + self.collections = Lexicon() + self.default: Optional[str] = None + self.name = None + self._configuration: Dict[str, Any] = {} + # Specific kwargs if applicable + self.loaded_from = kwargs.pop("loaded_from", None) + self.auto_dash_names = kwargs.pop("auto_dash_names", None) + # splat-kwargs version of default value (auto_dash_names=True) + if self.auto_dash_names is None: + self.auto_dash_names = True + # Name if applicable + _args = list(args) + if _args and isinstance(args[0], str): + self.name = self.transform(_args.pop(0)) + # Dispatch args/kwargs + for arg in _args: + self._add_object(arg) + # Dispatch kwargs + for name, obj in kwargs.items(): + self._add_object(obj, name) + + def _add_object(self, obj: Any, name: Optional[str] = None) -> None: + method: Callable + if isinstance(obj, Task): + method = self.add_task + elif isinstance(obj, (Collection, ModuleType)): + method = self.add_collection + else: + raise TypeError("No idea how to insert {!r}!".format(type(obj))) + method(obj, name=name) + + def __repr__(self) -> str: + task_names = list(self.tasks.keys()) + collections = ["{}...".format(x) for x in self.collections.keys()] + return "".format( + self.name, ", ".join(sorted(task_names) + sorted(collections)) + ) + + def __eq__(self, other: object) -> bool: + if isinstance(other, Collection): + return ( + self.name == other.name + and self.tasks == other.tasks + and self.collections == other.collections + ) + return False + + def __bool__(self) -> bool: + return bool(self.task_names) + + @classmethod + def from_module( + cls, + module: ModuleType, + name: Optional[str] = None, + config: Optional[Dict[str, Any]] = None, + loaded_from: Optional[str] = None, + auto_dash_names: Optional[bool] = None, + ) -> "Collection": + """ + Return a new `.Collection` created from ``module``. + + Inspects ``module`` for any `.Task` instances and adds them to a new + `.Collection`, returning it. If any explicit namespace collections + exist (named ``ns`` or ``namespace``) a copy of that collection object + is preferentially loaded instead. + + When the implicit/default collection is generated, it will be named + after the module's ``__name__`` attribute, or its last dotted section + if it's a submodule. (I.e. it should usually map to the actual ``.py`` + filename.) + + Explicitly given collections will only be given that module-derived + name if they don't already have a valid ``.name`` attribute. + + If the module has a docstring (``__doc__``) it is copied onto the + resulting `.Collection` (and used for display in help, list etc + output.) + + :param str name: + A string, which if given will override any automatically derived + collection name (or name set on the module's root namespace, if it + has one.) + + :param dict config: + Used to set config options on the newly created `.Collection` + before returning it (saving you a call to `.configure`.) + + If the imported module had a root namespace object, ``config`` is + merged on top of it (i.e. overriding any conflicts.) + + :param str loaded_from: + Identical to the same-named kwarg from the regular class + constructor - should be the path where the module was + found. + + :param bool auto_dash_names: + Identical to the same-named kwarg from the regular class + constructor - determines whether emitted names are auto-dashed. + + .. 
versionadded:: 1.0 + """ + module_name = module.__name__.split(".")[-1] + + def instantiate(obj_name: Optional[str] = None) -> "Collection": + # Explicitly given name wins over root ns name (if applicable), + # which wins over actual module name. + args = [name or obj_name or module_name] + kwargs = dict( + loaded_from=loaded_from, auto_dash_names=auto_dash_names + ) + instance = cls(*args, **kwargs) + instance.__doc__ = module.__doc__ + return instance + + # See if the module provides a default NS to use in lieu of creating + # our own collection. + for candidate in ("ns", "namespace"): + obj = getattr(module, candidate, None) + if obj and isinstance(obj, Collection): + # TODO: make this into Collection.clone() or similar? + ret = instantiate(obj_name=obj.name) + ret.tasks = ret._transform_lexicon(obj.tasks) + ret.collections = ret._transform_lexicon(obj.collections) + ret.default = ( + ret.transform(obj.default) if obj.default else None + ) + # Explicitly given config wins over root ns config + obj_config = copy_dict(obj._configuration) + if config: + merge_dicts(obj_config, config) + ret._configuration = obj_config + return ret + # Failing that, make our own collection from the module's tasks. + tasks = filter(lambda x: isinstance(x, Task), vars(module).values()) + # Again, explicit name wins over implicit one from module path + collection = instantiate() + for task in tasks: + collection.add_task(task) + if config: + collection.configure(config) + return collection + + def add_task( + self, + task: "Task", + name: Optional[str] = None, + aliases: Optional[Tuple[str, ...]] = None, + default: Optional[bool] = None, + ) -> None: + """ + Add `.Task` ``task`` to this collection. + + :param task: The `.Task` object to add to this collection. + + :param name: + Optional string name to bind to (overrides the task's own + self-defined ``name`` attribute and/or any Python identifier (i.e. + ``.func_name``.) + + :param aliases: + Optional iterable of additional names to bind the task as, on top + of the primary name. These will be used in addition to any aliases + the task itself declares internally. + + :param default: Whether this task should be the collection default. + + .. versionadded:: 1.0 + """ + if name is None: + if task.name: + name = task.name + # XXX https://github.com/python/mypy/issues/1424 + elif hasattr(task.body, "func_name"): + name = task.body.func_name # type: ignore + elif hasattr(task.body, "__name__"): + name = task.__name__ + else: + raise ValueError("Could not obtain a name for this task!") + name = self.transform(name) + if name in self.collections: + err = "Name conflict: this collection has a sub-collection named {!r} already" # noqa + raise ValueError(err.format(name)) + self.tasks[name] = task + for alias in list(task.aliases) + list(aliases or []): + self.tasks.alias(self.transform(alias), to=name) + if default is True or (default is None and task.is_default): + self._check_default_collision(name) + self.default = name + + def add_collection( + self, + coll: "Collection", + name: Optional[str] = None, + default: Optional[bool] = None, + ) -> None: + """ + Add `.Collection` ``coll`` as a sub-collection of this one. + + :param coll: The `.Collection` to add. + + :param str name: + The name to attach the collection as. Defaults to the collection's + own internal name. + + :param default: + Whether this sub-collection('s default task-or-collection) should + be the default invocation of the parent collection. + + .. versionadded:: 1.0 + .. 
versionchanged:: 1.5 + Added the ``default`` parameter. + """ + # Handle module-as-collection + if isinstance(coll, ModuleType): + coll = Collection.from_module(coll) + # Ensure we have a name, or die trying + name = name or coll.name + if not name: + raise ValueError("Non-root collections must have a name!") + name = self.transform(name) + # Test for conflict + if name in self.tasks: + err = "Name conflict: this collection has a task named {!r} already" # noqa + raise ValueError(err.format(name)) + # Insert + self.collections[name] = coll + if default: + self._check_default_collision(name) + self.default = name + + def _check_default_collision(self, name: str) -> None: + if self.default: + msg = "'{}' cannot be the default because '{}' already is!" + raise ValueError(msg.format(name, self.default)) + + def _split_path(self, path: str) -> Tuple[str, str]: + """ + Obtain first collection + remainder, of a task path. + + E.g. for ``"subcollection.taskname"``, return ``("subcollection", + "taskname")``; for ``"subcollection.nested.taskname"`` return + ``("subcollection", "nested.taskname")``, etc. + + An empty path becomes simply ``('', '')``. + """ + parts = path.split(".") + coll = parts.pop(0) + rest = ".".join(parts) + return coll, rest + + def subcollection_from_path(self, path: str) -> "Collection": + """ + Given a ``path`` to a subcollection, return that subcollection. + + .. versionadded:: 1.0 + """ + parts = path.split(".") + collection = self + while parts: + collection = collection.collections[parts.pop(0)] + return collection + + def __getitem__(self, name: Optional[str] = None) -> Any: + """ + Returns task named ``name``. Honors aliases and subcollections. + + If this collection has a default task, it is returned when ``name`` is + empty or ``None``. If empty input is given and no task has been + selected as the default, ValueError will be raised. + + Tasks within subcollections should be given in dotted form, e.g. + 'foo.bar'. Subcollection default tasks will be returned on the + subcollection's name. + + .. versionadded:: 1.0 + """ + return self.task_with_config(name)[0] + + def _task_with_merged_config( + self, coll: str, rest: str, ours: Dict[str, Any] + ) -> Tuple[str, Dict[str, Any]]: + task, config = self.collections[coll].task_with_config(rest) + return task, dict(config, **ours) + + def task_with_config( + self, name: Optional[str] + ) -> Tuple[str, Dict[str, Any]]: + """ + Return task named ``name`` plus its configuration dict. + + E.g. in a deeply nested tree, this method returns the `.Task`, and a + configuration dict created by merging that of this `.Collection` and + any nested `Collections <.Collection>`, up through the one actually + holding the `.Task`. + + See `~.Collection.__getitem__` for semantics of the ``name`` argument. + + :returns: Two-tuple of (`.Task`, `dict`). + + .. versionadded:: 1.0 + """ + # Our top level configuration + ours = self.configuration() + # Default task for this collection itself + if not name: + if not self.default: + raise ValueError("This collection has no default task.") + return self[self.default], ours + # Normalize name to the format we're expecting + name = self.transform(name) + # Non-default tasks within subcollections -> recurse (sorta) + if "." 
in name: + coll, rest = self._split_path(name) + return self._task_with_merged_config(coll, rest, ours) + # Default task for subcollections (via empty-name lookup) + if name in self.collections: + return self._task_with_merged_config(name, "", ours) + # Regular task lookup + return self.tasks[name], ours + + def __contains__(self, name: str) -> bool: + try: + self[name] + return True + except KeyError: + return False + + def to_contexts( + self, ignore_unknown_help: Optional[bool] = None + ) -> List[ParserContext]: + """ + Returns all contained tasks and subtasks as a list of parser contexts. + + :param bool ignore_unknown_help: + Passed on to each task's ``get_arguments()`` method. See the config + option by the same name for details. + + .. versionadded:: 1.0 + .. versionchanged:: 1.7 + Added the ``ignore_unknown_help`` kwarg. + """ + result = [] + for primary, aliases in self.task_names.items(): + task = self[primary] + result.append( + ParserContext( + name=primary, + aliases=aliases, + args=task.get_arguments( + ignore_unknown_help=ignore_unknown_help + ), + ) + ) + return result + + def subtask_name(self, collection_name: str, task_name: str) -> str: + return ".".join( + [self.transform(collection_name), self.transform(task_name)] + ) + + def transform(self, name: str) -> str: + """ + Transform ``name`` with the configured auto-dashes behavior. + + If the collection's ``auto_dash_names`` attribute is ``True`` + (default), all non leading/trailing underscores are turned into dashes. + (Leading/trailing underscores tend to get stripped elsewhere in the + stack.) + + If it is ``False``, the inverse is applied - all dashes are turned into + underscores. + + .. versionadded:: 1.0 + """ + # Short-circuit on anything non-applicable, e.g. empty strings, bools, + # None, etc. + if not name: + return name + from_, to = "_", "-" + if not self.auto_dash_names: + from_, to = "-", "_" + replaced = [] + end = len(name) - 1 + for i, char in enumerate(name): + # Don't replace leading or trailing underscores (+ taking dotted + # names into account) + # TODO: not 100% convinced of this / it may be exposing a + # discrepancy between this level & higher levels which tend to + # strip out leading/trailing underscores entirely. + if ( + i not in (0, end) + and char == from_ + and name[i - 1] != "." + and name[i + 1] != "." + ): + char = to + replaced.append(char) + return "".join(replaced) + + def _transform_lexicon(self, old: Lexicon) -> Lexicon: + """ + Take a Lexicon and apply `.transform` to its keys and aliases. + + :returns: A new Lexicon. + """ + new = Lexicon() + # Lexicons exhibit only their real keys in most places, so this will + # only grab those, not aliases. + for key, value in old.items(): + # Deepcopy the value so we're not just copying a reference + new[self.transform(key)] = copy.deepcopy(value) + # Also copy all aliases, which are string-to-string key mappings + for key, value in old.aliases.items(): + new.alias(from_=self.transform(key), to=self.transform(value)) + return new + + @property + def task_names(self) -> Dict[str, List[str]]: + """ + Return all task identifiers for this collection as a one-level dict. + + Specifically, a dict with the primary/"real" task names as the key, and + any aliases as a list value. + + It basically collapses the namespace tree into a single + easily-scannable collection of invocation strings, and is thus suitable + for things like flat-style task listings or transformation into parser + contexts. + + .. 
versionadded:: 1.0 + """ + ret = {} + # Our own tasks get no prefix, just go in as-is: {name: [aliases]} + for name, task in self.tasks.items(): + ret[name] = list(map(self.transform, task.aliases)) + # Subcollection tasks get both name + aliases prefixed + for coll_name, coll in self.collections.items(): + for task_name, aliases in coll.task_names.items(): + aliases = list( + map(lambda x: self.subtask_name(coll_name, x), aliases) + ) + # Tack on collection name to alias list if this task is the + # collection's default. + if coll.default == task_name: + aliases += (coll_name,) + ret[self.subtask_name(coll_name, task_name)] = aliases + return ret + + def configuration(self, taskpath: Optional[str] = None) -> Dict[str, Any]: + """ + Obtain merged configuration values from collection & children. + + :param taskpath: + (Optional) Task name/path, identical to that used for + `~.Collection.__getitem__` (e.g. may be dotted for nested tasks, + etc.) Used to decide which path to follow in the collection tree + when merging config values. + + :returns: A `dict` containing configuration values. + + .. versionadded:: 1.0 + """ + if taskpath is None: + return copy_dict(self._configuration) + return self.task_with_config(taskpath)[1] + + def configure(self, options: Dict[str, Any]) -> None: + """ + (Recursively) merge ``options`` into the current `.configuration`. + + Options configured this way will be available to all tasks. It is + recommended to use unique keys to avoid potential clashes with other + config options + + For example, if you were configuring a Sphinx docs build target + directory, it's better to use a key like ``'sphinx.target'`` than + simply ``'target'``. + + :param options: An object implementing the dictionary protocol. + :returns: ``None``. + + .. versionadded:: 1.0 + """ + merge_dicts(self._configuration, options) + + def serialized(self) -> Dict[str, Any]: + """ + Return an appropriate-for-serialization version of this object. + + See the documentation for `.Program` and its ``json`` task listing + format; this method is the driver for that functionality. + + .. 
versionadded:: 1.0 + """ + return { + "name": self.name, + "help": helpline(self), + "default": self.default, + "tasks": [ + { + "name": self.transform(x.name), + "help": helpline(x), + "aliases": [self.transform(y) for y in x.aliases], + } + for x in sorted(self.tasks.values(), key=lambda x: x.name) + ], + "collections": [ + x.serialized() + for x in sorted( + self.collections.values(), key=lambda x: x.name or "" + ) + ], + } diff --git a/.venv/lib/python3.9/site-packages/invoke/completion/__init__.py b/.venv/lib/python3.9/site-packages/invoke/completion/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/invoke/completion/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/completion/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..a4382fb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/completion/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/completion/__pycache__/complete.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/completion/__pycache__/complete.cpython-39.pyc new file mode 100644 index 0000000..8810db3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/completion/__pycache__/complete.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/completion/bash.completion b/.venv/lib/python3.9/site-packages/invoke/completion/bash.completion new file mode 100644 index 0000000..55f7c39 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/completion/bash.completion @@ -0,0 +1,32 @@ +# Invoke tab-completion script to be sourced with Bash shell. +# Known to work on Bash 3.x, untested on 4.x. + +_complete_{binary}() {{ + local candidates + + # COMP_WORDS contains the entire command string up til now (including + # program name). + # We hand it to Invoke so it can figure out the current context: spit back + # core options, task names, the current task's options, or some combo. + candidates=`{binary} --complete -- ${{COMP_WORDS[*]}}` + + # `compgen -W` takes list of valid options & a partial word & spits back + # possible matches. Necessary for any partial word completions (vs + # completions performed when no partial words are present). + # + # $2 is the current word or token being tabbed on, either empty string or a + # partial word, and thus wants to be compgen'd to arrive at some subset of + # our candidate list which actually matches. + # + # COMPREPLY is the list of valid completions handed back to `complete`. + COMPREPLY=( $(compgen -W "${{candidates}}" -- $2) ) +}} + + +# Tell shell builtin to use the above for completing our invocations. +# * -F: use given function name to generate completions. +# * -o default: when function generates no results, use filenames. +# * positional args: program names to complete for. +complete -F _complete_{binary} -o default {spaced_names} + +# vim: set ft=sh : diff --git a/.venv/lib/python3.9/site-packages/invoke/completion/complete.py b/.venv/lib/python3.9/site-packages/invoke/completion/complete.py new file mode 100644 index 0000000..97e9a95 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/completion/complete.py @@ -0,0 +1,129 @@ +""" +Command-line completion mechanisms, executed by the core ``--complete`` flag. 
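A hedged sketch of querying this mechanism by hand, mirroring the `{binary} --complete -- ...` call made by the completion scripts above; it assumes invoke is installed as `inv` and a tasks collection is discoverable from the working directory::

    import subprocess

    # Ask the installed binary for completion candidates, exactly as the
    # bash/zsh/fish scripts do. Candidates come back one per line: core
    # flags, task names, or the current task's flags, depending on what
    # has been typed so far.
    partial = ["inv", "dep"]  # i.e. the user has typed "inv dep<TAB>"
    result = subprocess.run(
        ["inv", "--complete", "--"] + partial,
        capture_output=True,
        text=True,
    )
    print(result.stdout)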
+""" + +from typing import List +import glob +import os +import re +import shlex +from typing import TYPE_CHECKING + +from ..exceptions import Exit, ParseError +from ..util import debug, task_name_sort_key + +if TYPE_CHECKING: + from ..collection import Collection + from ..parser import Parser, ParseResult, ParserContext + + +def complete( + names: List[str], + core: "ParseResult", + initial_context: "ParserContext", + collection: "Collection", + parser: "Parser", +) -> Exit: + # Strip out program name (scripts give us full command line) + # TODO: this may not handle path/to/script though? + invocation = re.sub(r"^({}) ".format("|".join(names)), "", core.remainder) + debug("Completing for invocation: {!r}".format(invocation)) + # Tokenize (shlex will have to do) + tokens = shlex.split(invocation) + # Handle flags (partial or otherwise) + if tokens and tokens[-1].startswith("-"): + tail = tokens[-1] + debug("Invocation's tail {!r} is flag-like".format(tail)) + # Gently parse invocation to obtain 'current' context. + # Use last seen context in case of failure (required for + # otherwise-invalid partial invocations being completed). + + contexts: List[ParserContext] + try: + debug("Seeking context name in tokens: {!r}".format(tokens)) + contexts = parser.parse_argv(tokens) + except ParseError as e: + msg = "Got parser error ({!r}), grabbing its last-seen context {!r}" # noqa + debug(msg.format(e, e.context)) + contexts = [e.context] if e.context is not None else [] + # Fall back to core context if no context seen. + debug("Parsed invocation, contexts: {!r}".format(contexts)) + if not contexts or not contexts[-1]: + context = initial_context + else: + context = contexts[-1] + debug("Selected context: {!r}".format(context)) + # Unknown flags (could be e.g. only partially typed out; could be + # wholly invalid; doesn't matter) complete with flags. + debug("Looking for {!r} in {!r}".format(tail, context.flags)) + if tail not in context.flags: + debug("Not found, completing with flag names") + # Long flags - partial or just the dashes - complete w/ long flags + if tail.startswith("--"): + for name in filter( + lambda x: x.startswith("--"), context.flag_names() + ): + print(name) + # Just a dash, completes with all flags + elif tail == "-": + for name in context.flag_names(): + print(name) + # Otherwise, it's something entirely invalid (a shortflag not + # recognized, or a java style flag like -foo) so return nothing + # (the shell will still try completing with files, but that doesn't + # hurt really.) + else: + pass + # Known flags complete w/ nothing or tasks, depending + else: + # Flags expecting values: do nothing, to let default (usually + # file) shell completion occur (which we actively want in this + # case.) + if context.flags[tail].takes_value: + debug("Found, and it takes a value, so no completion") + pass + # Not taking values (eg bools): print task names + else: + debug("Found, takes no value, printing task names") + print_task_names(collection) + # If not a flag, is either task name or a flag value, so just complete + # task names. + else: + debug("Last token isn't flag-like, just printing task names") + print_task_names(collection) + raise Exit + + +def print_task_names(collection: "Collection") -> None: + for name in sorted(collection.task_names, key=task_name_sort_key): + print(name) + # Just stick aliases after the thing they're aliased to. Sorting isn't + # so important that it's worth bending over backwards here. 
+ for alias in collection.task_names[name]: + print(alias) + + +def print_completion_script(shell: str, names: List[str]) -> None: + # Grab all .completion files in invoke/completion/. (These used to have no + # suffix, but surprise, that's super fragile. + completions = { + os.path.splitext(os.path.basename(x))[0]: x + for x in glob.glob( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "*.completion" + ) + ) + } + try: + path = completions[shell] + except KeyError: + err = 'Completion for shell "{}" not supported (options are: {}).' + raise ParseError(err.format(shell, ", ".join(sorted(completions)))) + debug("Printing completion script from {}".format(path)) + # Choose one arbitrary program name for script's own internal invocation + # (also used to construct completion function names when necessary) + binary = names[0] + with open(path, "r") as script: + print( + script.read().format(binary=binary, spaced_names=" ".join(names)) + ) diff --git a/.venv/lib/python3.9/site-packages/invoke/completion/fish.completion b/.venv/lib/python3.9/site-packages/invoke/completion/fish.completion new file mode 100644 index 0000000..5f479a1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/completion/fish.completion @@ -0,0 +1,10 @@ +# Invoke tab-completion script for the fish shell +# Copy it to the ~/.config/fish/completions directory + +function __complete_{binary} + {binary} --complete -- (commandline --tokenize) +end + +# --no-files: Don't complete files unless invoke gives an empty result +# TODO: find a way to honor all binary_names +complete --command {binary} --no-files --arguments '(__complete_{binary})' diff --git a/.venv/lib/python3.9/site-packages/invoke/completion/zsh.completion b/.venv/lib/python3.9/site-packages/invoke/completion/zsh.completion new file mode 100644 index 0000000..2fb7d12 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/completion/zsh.completion @@ -0,0 +1,33 @@ +# Invoke tab-completion script to be sourced with the Z shell. +# Known to work on zsh 5.0.x, probably works on later 4.x releases as well (as +# it uses the older compctl completion system). + +_complete_{binary}() {{ + # `words` contains the entire command string up til now (including + # program name). + # + # We hand it to Invoke so it can figure out the current context: spit back + # core options, task names, the current task's options, or some combo. + # + # Before doing so, we attempt to tease out any collection flag+arg so we + # can ensure it is applied correctly. + collection_arg='' + if [[ "${{words}}" =~ "(-c|--collection) [^ ]+" ]]; then + collection_arg=$MATCH + fi + # `reply` is the array of valid completions handed back to `compctl`. + # Use ${{=...}} to force whitespace splitting in expansion of + # $collection_arg + reply=( $({binary} ${{=collection_arg}} --complete -- ${{words}}) ) +}} + + +# Tell shell builtin to use the above for completing our given binary name(s). +# * -K: use given function name to generate completions. +# * +: specifies 'alternative' completion, where options after the '+' are only +# used if the completion from the options before the '+' result in no matches. +# * -f: when function generates no results, use filenames. +# * positional args: program names to complete for. 
+compctl -K _complete_{binary} + -f {spaced_names} + +# vim: set ft=sh : diff --git a/.venv/lib/python3.9/site-packages/invoke/config.py b/.venv/lib/python3.9/site-packages/invoke/config.py new file mode 100644 index 0000000..64e3846 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/config.py @@ -0,0 +1,1283 @@ +import copy +import json +import os +import types +from importlib.util import spec_from_loader +from os import PathLike +from os.path import join, splitext, expanduser +from types import ModuleType +from typing import Any, Dict, Iterator, Optional, Tuple, Type, Union + +from .env import Environment +from .exceptions import UnknownFileType, UnpicklableConfigMember +from .runners import Local +from .terminals import WINDOWS +from .util import debug, yaml + + +try: + from importlib.machinery import SourceFileLoader +except ImportError: # PyPy3 + from importlib._bootstrap import ( # type: ignore[no-redef] + _SourceFileLoader as SourceFileLoader, + ) + + +def load_source(name: str, path: str) -> Dict[str, Any]: + if not os.path.exists(path): + return {} + loader = SourceFileLoader("mod", path) + mod = ModuleType("mod") + mod.__spec__ = spec_from_loader("mod", loader) + loader.exec_module(mod) + return vars(mod) + + +class DataProxy: + """ + Helper class implementing nested dict+attr access for `.Config`. + + Specifically, is used both for `.Config` itself, and to wrap any other + dicts assigned as config values (recursively). + + .. warning:: + All methods (of this object or in subclasses) must take care to + initialize new attributes via ``self._set(name='value')``, or they'll + run into recursion errors! + + .. versionadded:: 1.0 + """ + + # Attributes which get proxied through to inner merged-dict config obj. + _proxies = ( + tuple( + """ + get + has_key + items + iteritems + iterkeys + itervalues + keys + values + """.split() + ) + + tuple( + "__{}__".format(x) + for x in """ + cmp + contains + iter + sizeof + """.split() + ) + ) + + @classmethod + def from_data( + cls, + data: Dict[str, Any], + root: Optional["DataProxy"] = None, + keypath: Tuple[str, ...] = tuple(), + ) -> "DataProxy": + """ + Alternate constructor for 'baby' DataProxies used as sub-dict values. + + Allows creating standalone DataProxy objects while also letting + subclasses like `.Config` define their own ``__init__`` without + muddling the two. + + :param dict data: + This particular DataProxy's personal data. Required, it's the Data + being Proxied. + + :param root: + Optional handle on a root DataProxy/Config which needs notification + on data updates. + + :param tuple keypath: + Optional tuple describing the path of keys leading to this + DataProxy's location inside the ``root`` structure. Required if + ``root`` was given (and vice versa.) + + .. versionadded:: 1.0 + """ + obj = cls() + obj._set(_config=data) + obj._set(_root=root) + obj._set(_keypath=keypath) + return obj + + def __getattr__(self, key: str) -> Any: + # NOTE: due to default Python attribute-lookup semantics, "real" + # attributes will always be yielded on attribute access and this method + # is skipped. That behavior is good for us (it's more intuitive than + # having a config key accidentally shadow a real attribute or method). + try: + return self._get(key) + except KeyError: + # Proxy most special vars to config for dict procotol. + if key in self._proxies: + return getattr(self._config, key) + # Otherwise, raise useful AttributeError to follow getattr proto. 
+ err = "No attribute or config key found for {!r}".format(key) + attrs = [x for x in dir(self.__class__) if not x.startswith("_")] + err += "\n\nValid keys: {!r}".format( + sorted(list(self._config.keys())) + ) + err += "\n\nValid real attributes: {!r}".format(attrs) + raise AttributeError(err) + + def __setattr__(self, key: str, value: Any) -> None: + # Turn attribute-sets into config updates anytime we don't have a real + # attribute with the given name/key. + has_real_attr = key in dir(self) + if not has_real_attr: + # Make sure to trigger our own __setitem__ instead of going direct + # to our internal dict/cache + self[key] = value + else: + super().__setattr__(key, value) + + def __iter__(self) -> Iterator[Dict[str, Any]]: + # For some reason Python is ignoring our __hasattr__ when determining + # whether we support __iter__. BOO + return iter(self._config) + + def __eq__(self, other: object) -> bool: + # NOTE: Can't proxy __eq__ because the RHS will always be an obj of the + # current class, not the proxied-to class, and that causes + # NotImplemented. + # Try comparing to other objects like ourselves, falling back to a not + # very comparable value (None) so comparison fails. + other_val = getattr(other, "_config", None) + # But we can compare to vanilla dicts just fine, since our _config is + # itself just a dict. + if isinstance(other, dict): + other_val = other + return bool(self._config == other_val) + + def __len__(self) -> int: + return len(self._config) + + def __setitem__(self, key: str, value: str) -> None: + self._config[key] = value + self._track_modification_of(key, value) + + def __getitem__(self, key: str) -> Any: + return self._get(key) + + def _get(self, key: str) -> Any: + # Short-circuit if pickling/copying mechanisms are asking if we've got + # __setstate__ etc; they'll ask this w/o calling our __init__ first, so + # we'd be in a RecursionError-causing catch-22 otherwise. + if key in ("__setstate__",): + raise AttributeError(key) + # At this point we should be able to assume a self._config... + value = self._config[key] + if isinstance(value, dict): + # New object's keypath is simply the key, prepended with our own + # keypath if we've got one. + keypath = (key,) + if hasattr(self, "_keypath"): + keypath = self._keypath + keypath + # If we have no _root, we must be the root, so it's us. Otherwise, + # pass along our handle on the root. + root = getattr(self, "_root", self) + value = DataProxy.from_data(data=value, root=root, keypath=keypath) + return value + + def _set(self, *args: Any, **kwargs: Any) -> None: + """ + Convenience workaround of default 'attrs are config keys' behavior. + + Uses `object.__setattr__` to work around the class' normal proxying + behavior, but is less verbose than using that directly. + + Has two modes (which may be combined if you really want): + + - ``self._set('attrname', value)``, just like ``__setattr__`` + - ``self._set(attname=value)`` (i.e. kwargs), even less typing. 
+ """ + if args: + object.__setattr__(self, *args) + for key, value in kwargs.items(): + object.__setattr__(self, key, value) + + def __repr__(self) -> str: + return "<{}: {}>".format(self.__class__.__name__, self._config) + + def __contains__(self, key: str) -> bool: + return key in self._config + + @property + def _is_leaf(self) -> bool: + return hasattr(self, "_root") + + @property + def _is_root(self) -> bool: + return hasattr(self, "_modify") + + def _track_removal_of(self, key: str) -> None: + # Grab the root object responsible for tracking removals; either the + # referenced root (if we're a leaf) or ourselves (if we're not). + # (Intermediate nodes never have anything but __getitem__ called on + # them, otherwise they're by definition being treated as a leaf.) + target = None + if self._is_leaf: + target = self._root + elif self._is_root: + target = self + if target is not None: + target._remove(getattr(self, "_keypath", tuple()), key) + + def _track_modification_of(self, key: str, value: str) -> None: + target = None + if self._is_leaf: + target = self._root + elif self._is_root: + target = self + if target is not None: + target._modify(getattr(self, "_keypath", tuple()), key, value) + + def __delitem__(self, key: str) -> None: + del self._config[key] + self._track_removal_of(key) + + def __delattr__(self, name: str) -> None: + # Make sure we don't screw up true attribute deletion for the + # situations that actually want it. (Uncommon, but not rare.) + if name in self: + del self[name] + else: + object.__delattr__(self, name) + + def clear(self) -> None: + keys = list(self.keys()) + for key in keys: + del self[key] + + def pop(self, *args: Any) -> Any: + # Must test this up front before (possibly) mutating self._config + key_existed = args and args[0] in self._config + # We always have a _config (whether it's a real dict or a cache of + # merged levels) so we can fall back to it for all the corner case + # handling re: args (arity, handling a default, raising KeyError, etc) + ret = self._config.pop(*args) + # If it looks like no popping occurred (key wasn't there), presumably + # user gave default, so we can short-circuit return here - no need to + # track a deletion that did not happen. + if not key_existed: + return ret + # Here, we can assume at least the 1st posarg (key) existed. + self._track_removal_of(args[0]) + # In all cases, return the popped value. + return ret + + def popitem(self) -> Any: + ret = self._config.popitem() + self._track_removal_of(ret[0]) + return ret + + def setdefault(self, *args: Any) -> Any: + # Must test up front whether the key existed beforehand + key_existed = args and args[0] in self._config + # Run locally + ret = self._config.setdefault(*args) + # Key already existed -> nothing was mutated, short-circuit + if key_existed: + return ret + # Here, we can assume the key did not exist and thus user must have + # supplied a 'default' (if they did not, the real setdefault() above + # would have excepted.) + key, default = args + self._track_modification_of(key, default) + return ret + + def update(self, *args: Any, **kwargs: Any) -> None: + if kwargs: + for key, value in kwargs.items(): + self[key] = value + elif args: + # TODO: complain if arity>1 + arg = args[0] + if isinstance(arg, dict): + for key in arg: + self[key] = arg[key] + else: + # TODO: be stricter about input in this case + for pair in arg: + self[pair[0]] = pair[1] + + +class Config(DataProxy): + """ + Invoke's primary configuration handling class. 
+ + See :doc:`/concepts/configuration` for details on the configuration system + this class implements, including the :ref:`configuration hierarchy + `. The rest of this class' documentation assumes + familiarity with that document. + + **Access** + + Configuration values may be accessed and/or updated using dict syntax:: + + config['foo'] + + or attribute syntax:: + + config.foo + + Nesting works the same way - dict config values are turned into objects + which honor both the dictionary protocol and the attribute-access method:: + + config['foo']['bar'] + config.foo.bar + + **A note about attribute access and methods** + + This class implements the entire dictionary protocol: methods such as + ``keys``, ``values``, ``items``, ``pop`` and so forth should all function + as they do on regular dicts. It also implements new config-specific methods + such as `load_system`, `load_collection`, `merge`, `clone`, etc. + + .. warning:: + Accordingly, this means that if you have configuration options sharing + names with these methods, you **must** use dictionary syntax (e.g. + ``myconfig['keys']``) to access the configuration data. + + **Lifecycle** + + At initialization time, `.Config`: + + - creates per-level data structures; + - stores any levels supplied to `__init__`, such as defaults or overrides, + as well as the various config file paths/filename patterns; + - and loads config files, if found (though typically this just means system + and user-level files, as project and runtime files need more info before + they can be found and loaded.) + + - This step can be skipped by specifying ``lazy=True``. + + At this point, `.Config` is fully usable - and because it pre-emptively + loads some config files, those config files can affect anything that + comes after, like CLI parsing or loading of task collections. + + In the CLI use case, further processing is done after instantiation, using + the ``load_*`` methods such as `load_overrides`, `load_project`, etc: + + - the result of argument/option parsing is applied to the overrides level; + - a project-level config file is loaded, as it's dependent on a loaded + tasks collection; + - a runtime config file is loaded, if its flag was supplied; + - then, for each task being executed: + + - per-collection data is loaded (only possible now that we have + collection & task in hand); + - shell environment data is loaded (must be done at end of process due + to using the rest of the config as a guide for interpreting env var + names.) + + At this point, the config object is handed to the task being executed, as + part of its execution `.Context`. + + Any modifications made directly to the `.Config` itself after this point + end up stored in their own (topmost) config level, making it easier to + debug final values. + + Finally, any *deletions* made to the `.Config` (e.g. applications of + dict-style mutators like ``pop``, ``clear`` etc) are also tracked in their + own structure, allowing the config object to honor such method calls + without mutating the underlying source data. + + **Special class attributes** + + The following class-level attributes are used for low-level configuration + of the config system itself, such as which file paths to load. They are + primarily intended for overriding by subclasses. + + - ``prefix``: Supplies the default value for ``file_prefix`` (directly) and + ``env_prefix`` (uppercased). See their descriptions for details. Its + default value is ``"invoke"``. 
+ - ``file_prefix``: The config file 'basename' default (though it is not a + literal basename; it can contain path parts if desired) which is appended + to the configured values of ``system_prefix``, ``user_prefix``, etc, to + arrive at the final (pre-extension) file paths. + + Thus, by default, a system-level config file path concatenates the + ``system_prefix`` of ``/etc/`` with the ``file_prefix`` of ``invoke`` to + arrive at paths like ``/etc/invoke.json``. + + Defaults to ``None``, meaning to use the value of ``prefix``. + + - ``env_prefix``: A prefix used (along with a joining underscore) to + determine which environment variables are loaded as the env var + configuration level. Since its default is the value of ``prefix`` + capitalized, this means env vars like ``INVOKE_RUN_ECHO`` are sought by + default. + + Defaults to ``None``, meaning to use the value of ``prefix``. + + .. versionadded:: 1.0 + """ + + prefix = "invoke" + file_prefix = None + env_prefix = None + + @staticmethod + def global_defaults() -> Dict[str, Any]: + """ + Return the core default settings for Invoke. + + Generally only for use by `.Config` internals. For descriptions of + these values, see :ref:`default-values`. + + Subclasses may choose to override this method, calling + ``Config.global_defaults`` and applying `.merge_dicts` to the result, + to add to or modify these values. + + .. versionadded:: 1.0 + """ + # On Windows, which won't have /bin/bash, check for a set COMSPEC env + # var (https://en.wikipedia.org/wiki/COMSPEC) or fallback to an + # unqualified cmd.exe otherwise. + if WINDOWS: + shell = os.environ.get("COMSPEC", "cmd.exe") + # Else, assume Unix, most distros of which have /bin/bash available. + # TODO: consider an automatic fallback to /bin/sh for systems lacking + # /bin/bash; however users may configure run.shell quite easily, so... + else: + shell = "/bin/bash" + + return { + # TODO: we document 'debug' but it's not truly implemented outside + # of env var and CLI flag. If we honor it, we have to go around and + # figure out at what points we might want to call + # `util.enable_logging`: + # - just using it as a fallback default for arg parsing isn't much + # use, as at that point the config holds nothing but defaults & CLI + # flag values + # - doing it at file load time might be somewhat useful, though + # where this happens may be subject to change soon + # - doing it at env var load time seems a bit silly given the + # existing support for at-startup testing for INVOKE_DEBUG + # 'debug': False, + # TODO: I feel like we want these to be more consistent re: default + # values stored here vs 'stored' as logic where they are + # referenced, there are probably some bits that are all "if None -> + # default" that could go here. Alternately, make _more_ of these + # default to None? + "run": { + "asynchronous": False, + "disown": False, + "dry": False, + "echo": False, + "echo_stdin": None, + "encoding": None, + "env": {}, + "err_stream": None, + "fallback": True, + "hide": None, + "in_stream": None, + "out_stream": None, + "echo_format": "\033[1;37m{command}\033[0m", + "pty": False, + "replace_env": False, + "shell": shell, + "warn": False, + "watchers": [], + }, + # This doesn't live inside the 'run' tree; otherwise it'd make it + # somewhat harder to extend/override in Fabric 2 which has a split + # local/remote runner situation. 
+ "runners": {"local": Local}, + "sudo": { + "password": None, + "prompt": "[sudo] password: ", + "user": None, + }, + "tasks": { + "auto_dash_names": True, + "collection_name": "tasks", + "dedupe": True, + "executor_class": None, + "ignore_unknown_help": False, + "search_root": None, + }, + "timeouts": {"command": None}, + } + + def __init__( + self, + overrides: Optional[Dict[str, Any]] = None, + defaults: Optional[Dict[str, Any]] = None, + system_prefix: Optional[str] = None, + user_prefix: Optional[str] = None, + project_location: Optional[PathLike] = None, + runtime_path: Optional[PathLike] = None, + lazy: bool = False, + ): + """ + Creates a new config object. + + :param dict defaults: + A dict containing default (lowest level) config data. Default: + `global_defaults`. + + :param dict overrides: + A dict containing override-level config data. Default: ``{}``. + + :param str system_prefix: + Base path for the global config file location; combined with the + prefix and file suffixes to arrive at final file path candidates. + + Default: ``/etc/`` (thus e.g. ``/etc/invoke.yaml`` or + ``/etc/invoke.json``). + + :param str user_prefix: + Like ``system_prefix`` but for the per-user config file. These + variables are joined as strings, not via path-style joins, so they + may contain partial file paths; for the per-user config file this + often means a leading dot, to make the final result a hidden file + on most systems. + + Default: ``~/.`` (e.g. ``~/.invoke.yaml``). + + :param str project_location: + Optional directory path of the currently loaded `.Collection` (as + loaded by `.Loader`). When non-empty, will trigger seeking of + per-project config files in this directory. + + :param str runtime_path: + Optional file path to a runtime configuration file. + + Used to fill the penultimate slot in the config hierarchy. Should + be a full file path to an existing file, not a directory path or a + prefix. + + :param bool lazy: + Whether to automatically load some of the lower config levels. + + By default (``lazy=False``), ``__init__`` automatically calls + `load_system` and `load_user` to load system and user config files, + respectively. + + For more control over what is loaded when, you can say + ``lazy=True``, and no automatic loading is done. + + .. note:: + If you give ``defaults`` and/or ``overrides`` as ``__init__`` + kwargs instead of waiting to use `load_defaults` or + `load_overrides` afterwards, those *will* still end up 'loaded' + immediately. + """ + # Technically an implementation detail - do not expose in public API. + # Stores merged configs and is accessed via DataProxy. + self._set(_config={}) + + # Config file suffixes to search, in preference order. + self._set(_file_suffixes=("yaml", "yml", "json", "py")) + + # Default configuration values, typically a copy of `global_defaults`. + if defaults is None: + defaults = copy_dict(self.global_defaults()) + self._set(_defaults=defaults) + + # Collection-driven config data, gathered from the collection tree + # containing the currently executing task. + self._set(_collection={}) + + # Path prefix searched for the system config file. + # NOTE: There is no default system prefix on Windows. + if system_prefix is None and not WINDOWS: + system_prefix = "/etc/" + self._set(_system_prefix=system_prefix) + # Path to loaded system config file, if any. + self._set(_system_path=None) + # Whether the system config file has been loaded or not (or ``None`` if + # no loading has been attempted yet.) 
+ self._set(_system_found=None) + # Data loaded from the system config file. + self._set(_system={}) + + # Path prefix searched for per-user config files. + if user_prefix is None: + user_prefix = "~/." + self._set(_user_prefix=user_prefix) + # Path to loaded user config file, if any. + self._set(_user_path=None) + # Whether the user config file has been loaded or not (or ``None`` if + # no loading has been attempted yet.) + self._set(_user_found=None) + # Data loaded from the per-user config file. + self._set(_user={}) + + # As it may want to be set post-init, project conf file related attrs + # get initialized or overwritten via a specific method. + self.set_project_location(project_location) + + # Environment variable name prefix + env_prefix = self.env_prefix + if env_prefix is None: + env_prefix = self.prefix + env_prefix = "{}_".format(env_prefix.upper()) + self._set(_env_prefix=env_prefix) + # Config data loaded from the shell environment. + self._set(_env={}) + + # As it may want to be set post-init, runtime conf file related attrs + # get initialized or overwritten via a specific method. + self.set_runtime_path(runtime_path) + + # Overrides - highest normal config level. Typically filled in from + # command-line flags. + if overrides is None: + overrides = {} + self._set(_overrides=overrides) + + # Absolute highest level: user modifications. + self._set(_modifications={}) + # And its sibling: user deletions. (stored as a flat dict of keypath + # keys and dummy values, for constant-time membership testing/removal + # w/ no messy recursion. TODO: maybe redo _everything_ that way? in + # _modifications and other levels, the values would of course be + # valuable and not just None) + self._set(_deletions={}) + + # Convenience loading of user and system files, since those require no + # other levels in order to function. + if not lazy: + self.load_base_conf_files() + # Always merge, otherwise defaults, etc are not usable until creator or + # a subroutine does so. + self.merge() + + def load_base_conf_files(self) -> None: + # Just a refactor of something done in unlazy init or in clone() + self.load_system(merge=False) + self.load_user(merge=False) + + def load_defaults(self, data: Dict[str, Any], merge: bool = True) -> None: + """ + Set or replace the 'defaults' configuration level, from ``data``. + + :param dict data: The config data to load as the defaults level. + + :param bool merge: + Whether to merge the loaded data into the central config. Default: + ``True``. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + self._set(_defaults=data) + if merge: + self.merge() + + def load_overrides(self, data: Dict[str, Any], merge: bool = True) -> None: + """ + Set or replace the 'overrides' configuration level, from ``data``. + + :param dict data: The config data to load as the overrides level. + + :param bool merge: + Whether to merge the loaded data into the central config. Default: + ``True``. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + self._set(_overrides=data) + if merge: + self.merge() + + def load_system(self, merge: bool = True) -> None: + """ + Load a system-level config file, if possible. + + Checks the configured ``_system_prefix`` path, which defaults to + ``/etc``, and will thus load files like ``/etc/invoke.yml``. + + :param bool merge: + Whether to merge the loaded data into the central config. Default: + ``True``. + + :returns: ``None``. + + .. 
versionadded:: 1.0 + """ + self._load_file(prefix="system", merge=merge) + + def load_user(self, merge: bool = True) -> None: + """ + Load a user-level config file, if possible. + + Checks the configured ``_user_prefix`` path, which defaults to ``~/.``, + and will thus load files like ``~/.invoke.yml``. + + :param bool merge: + Whether to merge the loaded data into the central config. Default: + ``True``. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + self._load_file(prefix="user", merge=merge) + + def load_project(self, merge: bool = True) -> None: + """ + Load a project-level config file, if possible. + + Checks the configured ``_project_prefix`` value derived from the path + given to `set_project_location`, which is typically set to the + directory containing the loaded task collection. + + Thus, if one were to run the CLI tool against a tasks collection + ``/home/myuser/code/tasks.py``, `load_project` would seek out files + like ``/home/myuser/code/invoke.yml``. + + :param bool merge: + Whether to merge the loaded data into the central config. Default: + ``True``. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + self._load_file(prefix="project", merge=merge) + + def set_runtime_path(self, path: Optional[PathLike]) -> None: + """ + Set the runtime config file path. + + .. versionadded:: 1.0 + """ + # Path to the user-specified runtime config file. + self._set(_runtime_path=path) + # Data loaded from the runtime config file. + self._set(_runtime={}) + # Whether the runtime config file has been loaded or not (or ``None`` + # if no loading has been attempted yet.) + self._set(_runtime_found=None) + + def load_runtime(self, merge: bool = True) -> None: + """ + Load a runtime-level config file, if one was specified. + + When the CLI framework creates a `Config`, it sets ``_runtime_path``, + which is a full path to the requested config file. This method attempts + to load that file. + + :param bool merge: + Whether to merge the loaded data into the central config. Default: + ``True``. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + self._load_file(prefix="runtime", absolute=True, merge=merge) + + def load_shell_env(self) -> None: + """ + Load values from the shell environment. + + `.load_shell_env` is intended for execution late in a `.Config` + object's lifecycle, once all other sources (such as a runtime config + file or per-collection configurations) have been loaded. Loading from + the shell is not terrifically expensive, but must be done at a specific + point in time to ensure the "only known config keys are loaded from the + env" behavior works correctly. + + See :ref:`env-vars` for details on this design decision and other info + re: how environment variables are scanned and loaded. + + .. versionadded:: 1.0 + """ + # Force merge of existing data to ensure we have an up to date picture + debug("Running pre-merge for shell env loading...") + self.merge() + debug("Done with pre-merge.") + loader = Environment(config=self._config, prefix=self._env_prefix) + self._set(_env=loader.load()) + debug("Loaded shell environment, triggering final merge") + self.merge() + + def load_collection( + self, data: Dict[str, Any], merge: bool = True + ) -> None: + """ + Update collection-driven config data. + + `.load_collection` is intended for use by the core task execution + machinery, which is responsible for obtaining collection-driven data. + See :ref:`collection-configuration` for details. + + .. 
versionadded:: 1.0 + """ + debug("Loading collection configuration") + self._set(_collection=data) + if merge: + self.merge() + + def set_project_location(self, path: Union[PathLike, str, None]) -> None: + """ + Set the directory path where a project-level config file may be found. + + Does not do any file loading on its own; for that, see `load_project`. + + .. versionadded:: 1.0 + """ + # 'Prefix' to match the other sets of attrs + project_prefix = None + if path is not None: + # Ensure the prefix is normalized to a directory-like path string + project_prefix = join(path, "") + self._set(_project_prefix=project_prefix) + # Path to loaded per-project config file, if any. + self._set(_project_path=None) + # Whether the project config file has been loaded or not (or ``None`` + # if no loading has been attempted yet.) + self._set(_project_found=None) + # Data loaded from the per-project config file. + self._set(_project={}) + + def _load_file( + self, prefix: str, absolute: bool = False, merge: bool = True + ) -> None: + # Setup + found = "_{}_found".format(prefix) + path = "_{}_path".format(prefix) + data = "_{}".format(prefix) + midfix = self.file_prefix + if midfix is None: + midfix = self.prefix + # Short-circuit if loading appears to have occurred already + if getattr(self, found) is not None: + return + # Moar setup + if absolute: + absolute_path = getattr(self, path) + # None -> expected absolute path but none set, short circuit + if absolute_path is None: + return + paths = [absolute_path] + else: + path_prefix = getattr(self, "_{}_prefix".format(prefix)) + # Short circuit if loading seems unnecessary (eg for project config + # files when not running out of a project) + if path_prefix is None: + return + paths = [ + ".".join((path_prefix + midfix, x)) + for x in self._file_suffixes + ] + # Poke 'em + for filepath in paths: + # Normalize + filepath = expanduser(filepath) + try: + try: + type_ = splitext(filepath)[1].lstrip(".") + loader = getattr(self, "_load_{}".format(type_)) + except AttributeError: + msg = "Config files of type {!r} (from file {!r}) are not supported! Please use one of: {!r}" # noqa + raise UnknownFileType( + msg.format(type_, filepath, self._file_suffixes) + ) + # Store data, the path it was found at, and fact that it was + # found + self._set(data, loader(filepath)) + self._set(path, filepath) + self._set(found, True) + break + # Typically means 'no such file', so just note & skip past. + except IOError as e: + if e.errno == 2: + err = "Didn't see any {}, skipping." + debug(err.format(filepath)) + else: + raise + # Still None -> no suffixed paths were found, record this fact + if getattr(self, path) is None: + self._set(found, False) + # Merge loaded data in if any was found + elif merge: + self.merge() + + def _load_yaml(self, path: PathLike) -> Any: + with open(path) as fd: + return yaml.safe_load(fd) + + _load_yml = _load_yaml + + def _load_json(self, path: PathLike) -> Any: + with open(path) as fd: + return json.load(fd) + + def _load_py(self, path: str) -> Dict[str, Any]: + data = {} + for key, value in (load_source("mod", path)).items(): + # Strip special members, as these are always going to be builtins + # and other special things a user will not want in their config. + if key.startswith("__"): + continue + # Raise exceptions on module values; they are unpicklable. + # TODO: suck it up and reimplement copy() without pickling? 
Then + # again, a user trying to stuff a module into their config is + # probably doing something better done in runtime/library level + # code and not in a "config file"...right? + if isinstance(value, types.ModuleType): + err = "'{}' is a module, which can't be used as a config value. (Are you perhaps giving a tasks file instead of a config file by mistake?)" # noqa + raise UnpicklableConfigMember(err.format(key)) + data[key] = value + return data + + def merge(self) -> None: + """ + Merge all config sources, in order. + + .. versionadded:: 1.0 + """ + debug("Merging config sources in order onto new empty _config...") + self._set(_config={}) + debug("Defaults: {!r}".format(self._defaults)) + merge_dicts(self._config, self._defaults) + debug("Collection-driven: {!r}".format(self._collection)) + merge_dicts(self._config, self._collection) + self._merge_file("system", "System-wide") + self._merge_file("user", "Per-user") + self._merge_file("project", "Per-project") + debug("Environment variable config: {!r}".format(self._env)) + merge_dicts(self._config, self._env) + self._merge_file("runtime", "Runtime") + debug("Overrides: {!r}".format(self._overrides)) + merge_dicts(self._config, self._overrides) + debug("Modifications: {!r}".format(self._modifications)) + merge_dicts(self._config, self._modifications) + debug("Deletions: {!r}".format(self._deletions)) + obliterate(self._config, self._deletions) + + def _merge_file(self, name: str, desc: str) -> None: + # Setup + desc += " config file" # yup + found = getattr(self, "_{}_found".format(name)) + path = getattr(self, "_{}_path".format(name)) + data = getattr(self, "_{}".format(name)) + # None -> no loading occurred yet + if found is None: + debug("{} has not been loaded yet, skipping".format(desc)) + # True -> hooray + elif found: + debug("{} ({}): {!r}".format(desc, path, data)) + merge_dicts(self._config, data) + # False -> did try, did not succeed + else: + # TODO: how to preserve what was tried for each case but only for + # the negative? Just a branch here based on 'name'? + debug("{} not found, skipping".format(desc)) + + def clone(self, into: Optional[Type["Config"]] = None) -> "Config": + """ + Return a copy of this configuration object. + + The new object will be identical in terms of configured sources and any + loaded (or user-manipulated) data, but will be a distinct object with + as little shared mutable state as possible. + + Specifically, all `dict` values within the config are recursively + recreated, with non-dict leaf values subjected to `copy.copy` (note: + *not* `copy.deepcopy`, as this can cause issues with various objects + such as compiled regexen or threading locks, often found buried deep + within rich aggregates like API or DB clients). + + The only remaining config values that may end up shared between a + config and its clone are thus those 'rich' objects that do not + `copy.copy` cleanly, or compound non-dict objects (such as lists or + tuples). + + :param into: + A `.Config` subclass that the new clone should be "upgraded" to. + + Used by client libraries which have their own `.Config` subclasses + that e.g. define additional defaults; cloning "into" one of these + subclasses ensures that any new keys/subtrees are added gracefully, + without overwriting anything that may have been pre-defined. + + Default: ``None`` (just clone into another regular `.Config`). + + :returns: + A `.Config`, or an instance of the class given to ``into``. + + .. 
versionadded:: 1.0 + """ + # Construct new object + klass = self.__class__ if into is None else into + # Also allow arbitrary constructor kwargs, for subclasses where passing + # (some) data in at init time is desired (vs post-init copying) + # TODO: probably want to pivot the whole class this way eventually...? + # No longer recall exactly why we went with the 'fresh init + attribute + # setting' approach originally...tho there's clearly some impedance + # mismatch going on between "I want stuff to happen in my config's + # instantiation" and "I want cloning to not trigger certain things like + # external data source loading". + # NOTE: this will include lazy=True, see end of method + new = klass(**self._clone_init_kwargs(into=into)) + # Copy/merge/etc all 'private' data sources and attributes + for name in """ + collection + system_prefix + system_path + system_found + system + user_prefix + user_path + user_found + user + project_prefix + project_path + project_found + project + env_prefix + env + runtime_path + runtime_found + runtime + overrides + modifications + """.split(): + name = "_{}".format(name) + my_data = getattr(self, name) + # Non-dict data gets carried over straight (via a copy()) + # NOTE: presumably someone could really screw up and change these + # values' types, but at that point it's on them... + if not isinstance(my_data, dict): + new._set(name, copy.copy(my_data)) + # Dict data gets merged (which also involves a copy.copy + # eventually) + else: + merge_dicts(getattr(new, name), my_data) + # Do what __init__ would've done if not lazy, i.e. load user/system + # conf files. + new.load_base_conf_files() + # Finally, merge() for reals (_load_base_conf_files doesn't do so + # internally, so that data wouldn't otherwise show up.) + new.merge() + return new + + def _clone_init_kwargs( + self, into: Optional[Type["Config"]] = None + ) -> Dict[str, Any]: + """ + Supply kwargs suitable for initializing a new clone of this object. + + Note that most of the `.clone` process involves copying data between + two instances instead of passing init kwargs; however, sometimes you + really do want init kwargs, which is why this method exists. + + :param into: The value of ``into`` as passed to the calling `.clone`. + + :returns: A `dict`. + """ + # NOTE: must pass in defaults fresh or otherwise global_defaults() gets + # used instead. Except when 'into' is in play, in which case we truly + # want the union of the two. + new_defaults = copy_dict(self._defaults) + if into is not None: + merge_dicts(new_defaults, into.global_defaults()) + # The kwargs. + return dict( + defaults=new_defaults, + # TODO: consider making this 'hardcoded' on the calling end (ie + # inside clone()) to make sure nobody accidentally nukes it via + # subclassing? + lazy=True, + ) + + def _modify(self, keypath: Tuple[str, ...], key: str, value: str) -> None: + """ + Update our user-modifications config level with new data. + + :param tuple keypath: + The key path identifying the sub-dict being updated. May be an + empty tuple if the update is occurring at the topmost level. + + :param str key: + The actual key receiving an update. + + :param value: + The value being written. + """ + # First, ensure we wipe the keypath from _deletions, in case it was + # previously deleted. + excise(self._deletions, keypath + (key,)) + # Now we can add it to the modifications structure. 
+ data = self._modifications + keypath_list = list(keypath) + while keypath_list: + subkey = keypath_list.pop(0) + # TODO: could use defaultdict here, but...meh? + if subkey not in data: + # TODO: generify this and the subsequent 3 lines... + data[subkey] = {} + data = data[subkey] + data[key] = value + self.merge() + + def _remove(self, keypath: Tuple[str, ...], key: str) -> None: + """ + Like `._modify`, but for removal. + """ + # NOTE: because deletions are processed in merge() last, we do not need + # to remove things from _modifications on removal; but we *do* do the + # inverse - remove from _deletions on modification. + # TODO: may be sane to push this step up to callers? + data = self._deletions + keypath_list = list(keypath) + while keypath_list: + subkey = keypath_list.pop(0) + if subkey in data: + data = data[subkey] + # If we encounter None, it means something higher up than our + # requested keypath is already marked as deleted; so we don't + # have to do anything or go further. + if data is None: + return + # Otherwise it's presumably another dict, so keep looping... + else: + # Key not found -> nobody's marked anything along this part of + # the path for deletion, so we'll start building it out. + data[subkey] = {} + # Then prep for next iteration + data = data[subkey] + # Exited loop -> data must be the leafmost dict, so we can now set our + # deleted key to None + data[key] = None + self.merge() + + +class AmbiguousMergeError(ValueError): + pass + + +def merge_dicts( + base: Dict[str, Any], updates: Dict[str, Any] +) -> Dict[str, Any]: + """ + Recursively merge dict ``updates`` into dict ``base`` (mutating ``base``.) + + * Values which are themselves dicts will be recursed into. + * Values which are a dict in one input and *not* a dict in the other input + (e.g. if our inputs were ``{'foo': 5}`` and ``{'foo': {'bar': 5}}``) are + irreconciliable and will generate an exception. + * Non-dict leaf values are run through `copy.copy` to avoid state bleed. + + .. note:: + This is effectively a lightweight `copy.deepcopy` which offers + protection from mismatched types (dict vs non-dict) and avoids some + core deepcopy problems (such as how it explodes on certain object + types). + + :returns: + The value of ``base``, which is mostly useful for wrapper functions + like `copy_dict`. + + .. versionadded:: 1.0 + """ + # TODO: for chrissakes just make it return instead of mutating? + for key, value in (updates or {}).items(): + # Dict values whose keys also exist in 'base' -> recurse + # (But only if both types are dicts.) + if key in base: + if isinstance(value, dict): + if isinstance(base[key], dict): + merge_dicts(base[key], value) + else: + raise _merge_error(base[key], value) + else: + if isinstance(base[key], dict): + raise _merge_error(base[key], value) + # Fileno-bearing objects are probably 'real' files which do not + # copy well & must be passed by reference. Meh. + elif hasattr(value, "fileno"): + base[key] = value + else: + base[key] = copy.copy(value) + # New values get set anew + else: + # Dict values get reconstructed to avoid being references to the + # updates dict, which can lead to nasty state-bleed bugs otherwise + if isinstance(value, dict): + base[key] = copy_dict(value) + # Fileno-bearing objects are probably 'real' files which do not + # copy well & must be passed by reference. Meh. 
+ elif hasattr(value, "fileno"): + base[key] = value + # Non-dict values just get set straight + else: + base[key] = copy.copy(value) + return base + + +def _merge_error(orig: object, new: object) -> AmbiguousMergeError: + return AmbiguousMergeError( + "Can't cleanly merge {} with {}".format( + _format_mismatch(orig), _format_mismatch(new) + ) + ) + + +def _format_mismatch(x: object) -> str: + return "{} ({!r})".format(type(x), x) + + +def copy_dict(source: Dict[str, Any]) -> Dict[str, Any]: + """ + Return a fresh copy of ``source`` with as little shared state as possible. + + Uses `merge_dicts` under the hood, with an empty ``base`` dict; see its + documentation for details on behavior. + + .. versionadded:: 1.0 + """ + return merge_dicts({}, source) + + +def excise(dict_: Dict[str, Any], keypath: Tuple[str, ...]) -> None: + """ + Remove key pointed at by ``keypath`` from nested dict ``dict_``, if exists. + + .. versionadded:: 1.0 + """ + data = dict_ + keypath_list = list(keypath) + leaf_key = keypath_list.pop() + while keypath_list: + key = keypath_list.pop(0) + if key not in data: + # Not there, nothing to excise + return + data = data[key] + if leaf_key in data: + del data[leaf_key] + + +def obliterate(base: Dict[str, Any], deletions: Dict[str, Any]) -> None: + """ + Remove all (nested) keys mentioned in ``deletions``, from ``base``. + + .. versionadded:: 1.0 + """ + for key, value in deletions.items(): + if isinstance(value, dict): + # NOTE: not testing for whether base[key] exists; if something's + # listed in a deletions structure, it must exist in some source + # somewhere, and thus also in the cache being obliterated. + obliterate(base[key], deletions[key]) + else: # implicitly None + del base[key] diff --git a/.venv/lib/python3.9/site-packages/invoke/context.py b/.venv/lib/python3.9/site-packages/invoke/context.py new file mode 100644 index 0000000..e9beaf4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/context.py @@ -0,0 +1,602 @@ +import os +import re +from contextlib import contextmanager +from itertools import cycle +from os import PathLike +from typing import ( + TYPE_CHECKING, + Any, + Generator, + Iterator, + List, + Optional, + Union, +) +from unittest.mock import Mock + +from .config import Config, DataProxy +from .exceptions import Failure, AuthFailure, ResponseNotAccepted +from .runners import Result +from .watchers import FailingResponder + +if TYPE_CHECKING: + from invoke.runners import Runner + + +class Context(DataProxy): + """ + Context-aware API wrapper & state-passing object. + + `.Context` objects are created during command-line parsing (or, if desired, + by hand) and used to share parser and configuration state with executed + tasks (see :ref:`why-context`). + + Specifically, the class offers wrappers for core API calls (such as `.run`) + which take into account CLI parser flags, configuration files, and/or + changes made at runtime. It also acts as a proxy for its `~.Context.config` + attribute - see that attribute's documentation for details. + + Instances of `.Context` may be shared between tasks when executing + sub-tasks - either the same context the caller was given, or an altered + copy thereof (or, theoretically, a brand new one). + + .. versionadded:: 1.0 + """ + + def __init__(self, config: Optional[Config] = None) -> None: + """ + :param config: + `.Config` object to use as the base configuration. + + Defaults to an anonymous/default `.Config` instance. + """ + #: The fully merged `.Config` object appropriate for this context. 
+ #: + #: `.Config` settings (see their documentation for details) may be + #: accessed like dictionary keys (``c.config['foo']``) or object + #: attributes (``c.config.foo``). + #: + #: As a convenience shorthand, the `.Context` object proxies to its + #: ``config`` attribute in the same way - e.g. ``c['foo']`` or + #: ``c.foo`` returns the same value as ``c.config['foo']``. + config = config if config is not None else Config() + self._set(_config=config) + #: A list of commands to run (via "&&") before the main argument to any + #: `run` or `sudo` calls. Note that the primary API for manipulating + #: this list is `prefix`; see its docs for details. + command_prefixes: List[str] = list() + self._set(command_prefixes=command_prefixes) + #: A list of directories to 'cd' into before running commands with + #: `run` or `sudo`; intended for management via `cd`, please see its + #: docs for details. + command_cwds: List[str] = list() + self._set(command_cwds=command_cwds) + + @property + def config(self) -> Config: + # Allows Context to expose a .config attribute even though DataProxy + # otherwise considers it a config key. + return self._config + + @config.setter + def config(self, value: Config) -> None: + # NOTE: mostly used by client libraries needing to tweak a Context's + # config at execution time; i.e. a Context subclass that bears its own + # unique data may want to be stood up when parameterizing/expanding a + # call list at start of a session, with the final config filled in at + # runtime. + self._set(_config=value) + + def run(self, command: str, **kwargs: Any) -> Optional[Result]: + """ + Execute a local shell command, honoring config options. + + Specifically, this method instantiates a `.Runner` subclass (according + to the ``runner`` config option; default is `.Local`) and calls its + ``.run`` method with ``command`` and ``kwargs``. + + See `.Runner.run` for details on ``command`` and the available keyword + arguments. + + .. versionadded:: 1.0 + """ + runner = self.config.runners.local(self) + return self._run(runner, command, **kwargs) + + # NOTE: broken out of run() to allow for runner class injection in + # Fabric/etc, which needs to juggle multiple runner class types (local and + # remote). + def _run( + self, runner: "Runner", command: str, **kwargs: Any + ) -> Optional[Result]: + command = self._prefix_commands(command) + return runner.run(command, **kwargs) + + def sudo(self, command: str, **kwargs: Any) -> Optional[Result]: + """ + Execute a shell command via ``sudo`` with password auto-response. + + **Basics** + + This method is identical to `run` but adds a handful of + convenient behaviors around invoking the ``sudo`` program. It doesn't + do anything users could not do themselves by wrapping `run`, but the + use case is too common to make users reinvent these wheels themselves. + + .. note:: + If you intend to respond to sudo's password prompt by hand, just + use ``run("sudo command")`` instead! The autoresponding features in + this method will just get in your way. + + Specifically, `sudo`: + + * Places a `.FailingResponder` into the ``watchers`` kwarg (see + :doc:`/concepts/watchers`) which: + + * searches for the configured ``sudo`` password prompt; + * responds with the configured sudo password (``sudo.password`` + from the :doc:`configuration `); + * can tell when that response causes an authentication failure + (e.g. if the system requires a password and one was not + configured), and raises `.AuthFailure` if so. 
+ + * Builds a ``sudo`` command string using the supplied ``command`` + argument, prefixed by various flags (see below); + * Executes that command via a call to `run`, returning the result. + + **Flags used** + + ``sudo`` flags used under the hood include: + + - ``-S`` to allow auto-responding of password via stdin; + - ``-p `` to explicitly state the prompt to use, so we can be + sure our auto-responder knows what to look for; + - ``-u `` if ``user`` is not ``None``, to execute the command as + a user other than ``root``; + - When ``-u`` is present, ``-H`` is also added, to ensure the + subprocess has the requested user's ``$HOME`` set properly. + + **Configuring behavior** + + There are a couple of ways to change how this method behaves: + + - Because it wraps `run`, it honors all `run` config parameters and + keyword arguments, in the same way that `run` does. + + - Thus, invocations such as ``c.sudo('command', echo=True)`` are + possible, and if a config layer (such as a config file or env + var) specifies that e.g. ``run.warn = True``, that too will take + effect under `sudo`. + + - `sudo` has its own set of keyword arguments (see below) and they are + also all controllable via the configuration system, under the + ``sudo.*`` tree. + + - Thus you could, for example, pre-set a sudo user in a config + file; such as an ``invoke.json`` containing ``{"sudo": {"user": + "someuser"}}``. + + :param str password: Runtime override for ``sudo.password``. + :param str user: Runtime override for ``sudo.user``. + + .. versionadded:: 1.0 + """ + runner = self.config.runners.local(self) + return self._sudo(runner, command, **kwargs) + + # NOTE: this is for runner injection; see NOTE above _run(). + def _sudo( + self, runner: "Runner", command: str, **kwargs: Any + ) -> Optional[Result]: + prompt = self.config.sudo.prompt + password = kwargs.pop("password", self.config.sudo.password) + user = kwargs.pop("user", self.config.sudo.user) + env = kwargs.get("env", {}) + # TODO: allow subclassing for 'get the password' so users who REALLY + # want lazy runtime prompting can have it easily implemented. + # TODO: want to print a "cleaner" echo with just 'sudo '; but + # hard to do as-is, obtaining config data from outside a Runner one + # holds is currently messy (could fix that), if instead we manually + # inspect the config ourselves that duplicates logic. NOTE: once we + # figure that out, there is an existing, would-fail-if-not-skipped test + # for this behavior in test/context.py. + # TODO: once that is done, though: how to handle "full debug" output + # exactly (display of actual, real full sudo command w/ -S and -p), in + # terms of API/config? Impl is easy, just go back to passing echo + # through to 'run'... + user_flags = "" + if user is not None: + user_flags = "-H -u {} ".format(user) + env_flags = "" + if env: + env_flags = "--preserve-env='{}' ".format(",".join(env.keys())) + command = self._prefix_commands(command) + cmd_str = "sudo -S -p '{}' {}{}{}".format( + prompt, env_flags, user_flags, command + ) + watcher = FailingResponder( + pattern=re.escape(prompt), + response="{}\n".format(password), + sentinel="Sorry, try again.\n", + ) + # Ensure we merge any user-specified watchers with our own. + # NOTE: If there are config-driven watchers, we pull those up to the + # kwarg level; that lets us merge cleanly without needing complex + # config-driven "override vs merge" semantics. + # TODO: if/when those semantics are implemented, use them instead. 
+ # NOTE: config value for watchers defaults to an empty list; and we + # want to clone it to avoid actually mutating the config. + watchers = kwargs.pop("watchers", list(self.config.run.watchers)) + watchers.append(watcher) + try: + return runner.run(cmd_str, watchers=watchers, **kwargs) + except Failure as failure: + # Transmute failures driven by our FailingResponder, into auth + # failures - the command never even ran. + # TODO: wants to be a hook here for users that desire "override a + # bad config value for sudo.password" manual input + # NOTE: as noted in #294 comments, we MAY in future want to update + # this so run() is given ability to raise AuthFailure on its own. + # For now that has been judged unnecessary complexity. + if isinstance(failure.reason, ResponseNotAccepted): + # NOTE: not bothering with 'reason' here, it's pointless. + error = AuthFailure(result=failure.result, prompt=prompt) + raise error + # Reraise for any other error so it bubbles up normally. + else: + raise + + # TODO: wonder if it makes sense to move this part of things inside Runner, + # which would grow a `prefixes` and `cwd` init kwargs or similar. The less + # that's stuffed into Context, probably the better. + def _prefix_commands(self, command: str) -> str: + """ + Prefixes ``command`` with all prefixes found in ``command_prefixes``. + + ``command_prefixes`` is a list of strings which is modified by the + `prefix` context manager. + """ + prefixes = list(self.command_prefixes) + current_directory = self.cwd + if current_directory: + prefixes.insert(0, "cd {}".format(current_directory)) + + return " && ".join(prefixes + [command]) + + @contextmanager + def prefix(self, command: str) -> Generator[None, None, None]: + """ + Prefix all nested `run`/`sudo` commands with given command plus ``&&``. + + Most of the time, you'll want to be using this alongside a shell script + which alters shell state, such as ones which export or alter shell + environment variables. + + For example, one of the most common uses of this tool is with the + ``workon`` command from `virtualenvwrapper + `_:: + + with c.prefix('workon myvenv'): + c.run('./manage.py migrate') + + In the above snippet, the actual shell command run would be this:: + + $ workon myvenv && ./manage.py migrate + + This context manager is compatible with `cd`, so if your virtualenv + doesn't ``cd`` in its ``postactivate`` script, you could do the + following:: + + with c.cd('/path/to/app'): + with c.prefix('workon myvenv'): + c.run('./manage.py migrate') + c.run('./manage.py loaddata fixture') + + Which would result in executions like so:: + + $ cd /path/to/app && workon myvenv && ./manage.py migrate + $ cd /path/to/app && workon myvenv && ./manage.py loaddata fixture + + Finally, as alluded to above, `prefix` may be nested if desired, e.g.:: + + with c.prefix('workon myenv'): + c.run('ls') + with c.prefix('source /some/script'): + c.run('touch a_file') + + The result:: + + $ workon myenv && ls + $ workon myenv && source /some/script && touch a_file + + Contrived, but hopefully illustrative. + + .. versionadded:: 1.0 + """ + self.command_prefixes.append(command) + try: + yield + finally: + self.command_prefixes.pop() + + @property + def cwd(self) -> str: + """ + Return the current working directory, accounting for uses of `cd`. + + .. versionadded:: 1.0 + """ + if not self.command_cwds: + # TODO: should this be None? 
Feels cleaner, though there may be + # benefits to it being an empty string, such as relying on a no-arg + # `cd` typically being shorthand for "go to user's $HOME". + return "" + + # get the index for the subset of paths starting with the last / or ~ + for i, path in reversed(list(enumerate(self.command_cwds))): + if path.startswith("~") or path.startswith("/"): + break + + # TODO: see if there's a stronger "escape this path" function somewhere + # we can reuse. e.g., escaping tildes or slashes in filenames. + paths = [path.replace(" ", r"\ ") for path in self.command_cwds[i:]] + return str(os.path.join(*paths)) + + @contextmanager + def cd(self, path: Union[PathLike, str]) -> Generator[None, None, None]: + """ + Context manager that keeps directory state when executing commands. + + Any calls to `run`, `sudo`, within the wrapped block will implicitly + have a string similar to ``"cd && "`` prefixed in order to give + the sense that there is actually statefulness involved. + + Because use of `cd` affects all such invocations, any code making use + of the `cwd` property will also be affected by use of `cd`. + + Like the actual 'cd' shell builtin, `cd` may be called with relative + paths (keep in mind that your default starting directory is your user's + ``$HOME``) and may be nested as well. + + Below is a "normal" attempt at using the shell 'cd', which doesn't work + since all commands are executed in individual subprocesses -- state is + **not** kept between invocations of `run` or `sudo`:: + + c.run('cd /var/www') + c.run('ls') + + The above snippet will list the contents of the user's ``$HOME`` + instead of ``/var/www``. With `cd`, however, it will work as expected:: + + with c.cd('/var/www'): + c.run('ls') # Turns into "cd /var/www && ls" + + Finally, a demonstration (see inline comments) of nesting:: + + with c.cd('/var/www'): + c.run('ls') # cd /var/www && ls + with c.cd('website1'): + c.run('ls') # cd /var/www/website1 && ls + + .. note:: + Space characters will be escaped automatically to make dealing with + such directory names easier. + + .. versionadded:: 1.0 + .. versionchanged:: 1.5 + Explicitly cast the ``path`` argument (the only argument) to a + string; this allows any object defining ``__str__`` to be handed in + (such as the various ``Path`` objects out there), and not just + string literals. + """ + path = str(path) + self.command_cwds.append(path) + try: + yield + finally: + self.command_cwds.pop() + + +class MockContext(Context): + """ + A `.Context` whose methods' return values can be predetermined. + + Primarily useful for testing Invoke-using codebases. + + .. note:: + This class wraps its ``run``, etc methods in `unittest.mock.Mock` + objects. This allows you to easily assert that the methods (still + returning the values you prepare them with) were actually called. + + .. note:: + Methods not given `Results <.Result>` to yield will raise + ``NotImplementedError`` if called (since the alternative is to call the + real underlying method - typically undesirable when mocking.) + + .. versionadded:: 1.0 + .. versionchanged:: 1.5 + Added ``Mock`` wrapping of ``run`` and ``sudo``. + """ + + def __init__(self, config: Optional[Config] = None, **kwargs: Any) -> None: + """ + Create a ``Context``-like object whose methods yield `.Result` objects. + + :param config: + A Configuration object to use. Identical in behavior to `.Context`. 
+ + :param run: + A data structure indicating what `.Result` objects to return from + calls to the instantiated object's `~.Context.run` method (instead + of actually executing the requested shell command). + + Specifically, this kwarg accepts: + + - A single `.Result` object. + - A boolean; if True, yields a `.Result` whose ``exited`` is ``0``, + and if False, ``1``. + - An iterable of the above values, which will be returned on each + subsequent call to ``.run`` (the first item on the first call, + the second on the second call, etc). + - A dict mapping command strings or compiled regexen to the above + values (including an iterable), allowing specific + call-and-response semantics instead of assuming a call order. + + :param sudo: + Identical to ``run``, but whose values are yielded from calls to + `~.Context.sudo`. + + :param bool repeat: + A flag determining whether results yielded by this class' methods + repeat or are consumed. + + For example, when a single result is indicated, it will normally + only be returned once, causing ``NotImplementedError`` afterwards. + But when ``repeat=True`` is given, that result is returned on + every call, forever. + + Similarly, iterable results are normally exhausted once, but when + this setting is enabled, they are wrapped in `itertools.cycle`. + + Default: ``True``. + + :raises: + ``TypeError``, if the values given to ``run`` or other kwargs + aren't of the expected types. + + .. versionchanged:: 1.5 + Added support for boolean and string result values. + .. versionchanged:: 1.5 + Added support for regex dict keys. + .. versionchanged:: 1.5 + Added the ``repeat`` keyword argument. + .. versionchanged:: 2.0 + Changed ``repeat`` default value from ``False`` to ``True``. + """ + # Set up like any other Context would, with the config + super().__init__(config) + # Pull out behavioral kwargs + self._set("__repeat", kwargs.pop("repeat", True)) + # The rest must be things like run/sudo - mock Context method info + for method, results in kwargs.items(): + # For each possible value type, normalize to iterable of Result + # objects (possibly repeating). + singletons = (Result, bool, str) + if isinstance(results, dict): + for key, value in results.items(): + results[key] = self._normalize(value) + elif isinstance(results, singletons) or hasattr( + results, "__iter__" + ): + results = self._normalize(results) + # Unknown input value: cry + else: + err = "Not sure how to yield results from a {!r}" + raise TypeError(err.format(type(results))) + # Save results for use by the method + self._set("__{}".format(method), results) + # Wrap the method in a Mock + self._set(method, Mock(wraps=getattr(self, method))) + + def _normalize(self, value: Any) -> Iterator[Any]: + # First turn everything into an iterable + if not hasattr(value, "__iter__") or isinstance(value, str): + value = [value] + # Then turn everything within into a Result + results = [] + for obj in value: + if isinstance(obj, bool): + obj = Result(exited=0 if obj else 1) + elif isinstance(obj, str): + obj = Result(obj) + results.append(obj) + # Finally, turn that iterable into an iteratOR, depending on repeat + return cycle(results) if getattr(self, "__repeat") else iter(results) + + # TODO: _maybe_ make this more metaprogrammy/flexible (using __call__ etc)? + # Pretty worried it'd cause more hard-to-debug issues than it's presently + # worth. Maybe in situations where Context grows a _lot_ of methods (e.g. + # in Fabric 2; though Fabric could do its own sub-subclass in that case...) 
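# A minimal sketch (editorial example, not part of the vendored invoke source
# in this diff): using MockContext as the docstring above describes - canned
# Result objects keyed by command string, with the Mock wrapping of run()
# allowing call assertions afterwards. The deploy() task under test is an
# assumption made up purely for illustration.
from invoke import MockContext, Result

def deploy(c):
    # Hypothetical task body: pull the latest code.
    return c.run("git pull")

def test_deploy_pulls():
    c = MockContext(run={"git pull": Result("Already up to date.")})
    result = deploy(c)
    assert result.stdout == "Already up to date."
    # The Mock wrapper added in __init__ lets us assert the call happened.
    c.run.assert_called_once_with("git pull")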
+ + def _yield_result(self, attname: str, command: str) -> Result: + try: + obj = getattr(self, attname) + # Dicts need to try direct lookup or regex matching + if isinstance(obj, dict): + try: + obj = obj[command] + except KeyError: + # TODO: could optimize by skipping this if not any regex + # objects in keys()? + for key, value in obj.items(): + if hasattr(key, "match") and key.match(command): + obj = value + break + else: + # Nope, nothing did match. + raise KeyError + # Here, the value was either never a dict or has been extracted + # from one, so we can assume it's an iterable of Result objects due + # to work done by __init__. + result: Result = next(obj) + # Populate Result's command string with what matched unless + # explicitly given + if not result.command: + result.command = command + return result + except (AttributeError, IndexError, KeyError, StopIteration): + # raise_from(NotImplementedError(command), None) + raise NotImplementedError(command) + + def run(self, command: str, *args: Any, **kwargs: Any) -> Result: + # TODO: perform more convenience stuff associating args/kwargs with the + # result? E.g. filling in .command, etc? Possibly useful for debugging + # if one hits unexpected-order problems with what they passed in to + # __init__. + return self._yield_result("__run", command) + + def sudo(self, command: str, *args: Any, **kwargs: Any) -> Result: + # TODO: this completely nukes the top-level behavior of sudo(), which + # could be good or bad, depending. Most of the time I think it's good. + # No need to supply dummy password config, etc. + # TODO: see the TODO from run() re: injecting arg/kwarg values + return self._yield_result("__sudo", command) + + def set_result_for( + self, attname: str, command: str, result: Result + ) -> None: + """ + Modify the stored mock results for given ``attname`` (e.g. ``run``). + + This is similar to how one instantiates `MockContext` with a ``run`` or + ``sudo`` dict kwarg. For example, this:: + + mc = MockContext(run={'mycommand': Result("mystdout")}) + assert mc.run('mycommand').stdout == "mystdout" + + is functionally equivalent to this:: + + mc = MockContext() + mc.set_result_for('run', 'mycommand', Result("mystdout")) + assert mc.run('mycommand').stdout == "mystdout" + + `set_result_for` is mostly useful for modifying an already-instantiated + `MockContext`, such as one created by test setup or helper methods. + + .. versionadded:: 1.0 + """ + attname = "__{}".format(attname) + heck = TypeError( + "Can't update results for non-dict or nonexistent mock results!" + ) + # Get value & complain if it's not a dict. + # TODO: should we allow this to set non-dict values too? Seems vaguely + # pointless, at that point, just make a new MockContext eh? + try: + value = getattr(self, attname) + except AttributeError: + raise heck + if not isinstance(value, dict): + raise heck + # OK, we're good to modify, so do so. + value[command] = self._normalize(result) diff --git a/.venv/lib/python3.9/site-packages/invoke/env.py b/.venv/lib/python3.9/site-packages/invoke/env.py new file mode 100644 index 0000000..2c7aaa6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/env.py @@ -0,0 +1,123 @@ +""" +Environment variable configuration loading class. + +Using a class here doesn't really model anything but makes state passing (in a +situation requiring it) more convenient. + +This module is currently considered private/an implementation detail and should +not be included in the Sphinx API documentation. 
+""" + +import os +from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Mapping, Sequence + +from .exceptions import UncastableEnvVar, AmbiguousEnvVar +from .util import debug + +if TYPE_CHECKING: + from .config import Config + + +class Environment: + def __init__(self, config: "Config", prefix: str) -> None: + self._config = config + self._prefix = prefix + self.data: Dict[str, Any] = {} # Accumulator + + def load(self) -> Dict[str, Any]: + """ + Return a nested dict containing values from `os.environ`. + + Specifically, values whose keys map to already-known configuration + settings, allowing us to perform basic typecasting. + + See :ref:`env-vars` for details. + """ + # Obtain allowed env var -> existing value map + env_vars = self._crawl(key_path=[], env_vars={}) + m = "Scanning for env vars according to prefix: {!r}, mapping: {!r}" + debug(m.format(self._prefix, env_vars)) + # Check for actual env var (honoring prefix) and try to set + for env_var, key_path in env_vars.items(): + real_var = (self._prefix or "") + env_var + if real_var in os.environ: + self._path_set(key_path, os.environ[real_var]) + debug("Obtained env var config: {!r}".format(self.data)) + return self.data + + def _crawl( + self, key_path: List[str], env_vars: Mapping[str, Sequence[str]] + ) -> Dict[str, Any]: + """ + Examine config at location ``key_path`` & return potential env vars. + + Uses ``env_vars`` dict to determine if a conflict exists, and raises an + exception if so. This dict is of the following form:: + + { + 'EXPECTED_ENV_VAR_HERE': ['actual', 'nested', 'key_path'], + ... + } + + Returns another dictionary of new keypairs as per above. + """ + new_vars: Dict[str, List[str]] = {} + obj = self._path_get(key_path) + # Sub-dict -> recurse + if ( + hasattr(obj, "keys") + and callable(obj.keys) + and hasattr(obj, "__getitem__") + ): + for key in obj.keys(): + merged_vars = dict(env_vars, **new_vars) + merged_path = key_path + [key] + crawled = self._crawl(merged_path, merged_vars) + # Handle conflicts + for key in crawled: + if key in new_vars: + err = "Found >1 source for {}" + raise AmbiguousEnvVar(err.format(key)) + # Merge and continue + new_vars.update(crawled) + # Other -> is leaf, no recursion + else: + new_vars[self._to_env_var(key_path)] = key_path + return new_vars + + def _to_env_var(self, key_path: Iterable[str]) -> str: + return "_".join(key_path).upper() + + def _path_get(self, key_path: Iterable[str]) -> "Config": + # Gets are from self._config because that's what determines valid env + # vars and/or values for typecasting. + obj = self._config + for key in key_path: + obj = obj[key] + return obj + + def _path_set(self, key_path: Sequence[str], value: str) -> None: + # Sets are to self.data since that's what we are presenting to the + # outer config object and debugging. + obj = self.data + for key in key_path[:-1]: + if key not in obj: + obj[key] = {} + obj = obj[key] + old = self._path_get(key_path) + new = self._cast(old, value) + obj[key_path[-1]] = new + + def _cast(self, old: Any, new: Any) -> Any: + if isinstance(old, bool): + return new not in ("0", "") + elif isinstance(old, str): + return new + elif old is None: + return new + elif isinstance(old, (list, tuple)): + err = "Can't adapt an environment string into a {}!" 
+ err = err.format(type(old)) + raise UncastableEnvVar(err) + else: + return old.__class__(new) diff --git a/.venv/lib/python3.9/site-packages/invoke/exceptions.py b/.venv/lib/python3.9/site-packages/invoke/exceptions.py new file mode 100644 index 0000000..19ca563 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/exceptions.py @@ -0,0 +1,425 @@ +""" +Custom exception classes. + +These vary in use case from "we needed a specific data structure layout in +exceptions used for message-passing" to simply "we needed to express an error +condition in a way easily told apart from other, truly unexpected errors". +""" + +from pprint import pformat +from traceback import format_exception +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple + +if TYPE_CHECKING: + from .parser import ParserContext + from .runners import Result + from .util import ExceptionWrapper + + +class CollectionNotFound(Exception): + def __init__(self, name: str, start: str) -> None: + self.name = name + self.start = start + + +class Failure(Exception): + """ + Exception subclass representing failure of a command execution. + + "Failure" may mean the command executed and the shell indicated an unusual + result (usually, a non-zero exit code), or it may mean something else, like + a ``sudo`` command which was aborted when the supplied password failed + authentication. + + Two attributes allow introspection to determine the nature of the problem: + + * ``result``: a `.Result` instance with info about the command being + executed and, if it ran to completion, how it exited. + * ``reason``: a wrapped exception instance if applicable (e.g. a + `.StreamWatcher` raised `WatcherError`) or ``None`` otherwise, in which + case, it's probably a `Failure` subclass indicating its own specific + nature, such as `UnexpectedExit` or `CommandTimedOut`. + + This class is only rarely raised by itself; most of the time `.Runner.run` + (or a wrapper of same, such as `.Context.sudo`) will raise a specific + subclass like `UnexpectedExit` or `AuthFailure`. + + .. versionadded:: 1.0 + """ + + def __init__( + self, result: "Result", reason: Optional["WatcherError"] = None + ) -> None: + self.result = result + self.reason = reason + + def streams_for_display(self) -> Tuple[str, str]: + """ + Return stdout/err streams as necessary for error display. + + Subject to the following rules: + + - If a given stream was *not* hidden during execution, a placeholder is + used instead, to avoid printing it twice. + - Only the last 10 lines of stream text is included. + - PTY-driven execution will lack stderr, and a specific message to this + effect is returned instead of a stderr dump. + + :returns: Two-tuple of stdout, stderr strings. + + .. versionadded:: 1.3 + """ + already_printed = " already printed" + if "stdout" not in self.result.hide: + stdout = already_printed + else: + stdout = self.result.tail("stdout") + if self.result.pty: + stderr = " n/a (PTYs have no stderr)" + else: + if "stderr" not in self.result.hide: + stderr = already_printed + else: + stderr = self.result.tail("stderr") + return stdout, stderr + + def __repr__(self) -> str: + return self._repr() + + def _repr(self, **kwargs: Any) -> str: + """ + Return ``__repr__``-like value from inner result + any kwargs. + """ + # TODO: expand? + # TODO: truncate command? 
+ template = "<{}: cmd={!r}{}>" + rest = "" + if kwargs: + rest = " " + " ".join( + "{}={}".format(key, value) for key, value in kwargs.items() + ) + return template.format( + self.__class__.__name__, self.result.command, rest + ) + + +class UnexpectedExit(Failure): + """ + A shell command ran to completion but exited with an unexpected exit code. + + Its string representation displays the following: + + - Command executed; + - Exit code; + - The last 10 lines of stdout, if it was hidden; + - The last 10 lines of stderr, if it was hidden and non-empty (e.g. + pty=False; when pty=True, stderr never happens.) + + .. versionadded:: 1.0 + """ + + def __str__(self) -> str: + stdout, stderr = self.streams_for_display() + command = self.result.command + exited = self.result.exited + template = """Encountered a bad command exit code! + +Command: {!r} + +Exit code: {} + +Stdout:{} + +Stderr:{} + +""" + return template.format(command, exited, stdout, stderr) + + def _repr(self, **kwargs: Any) -> str: + kwargs.setdefault("exited", self.result.exited) + return super()._repr(**kwargs) + + +class CommandTimedOut(Failure): + """ + Raised when a subprocess did not exit within a desired timeframe. + """ + + def __init__(self, result: "Result", timeout: int) -> None: + super().__init__(result) + self.timeout = timeout + + def __repr__(self) -> str: + return self._repr(timeout=self.timeout) + + def __str__(self) -> str: + stdout, stderr = self.streams_for_display() + command = self.result.command + template = """Command did not complete within {} seconds! + +Command: {!r} + +Stdout:{} + +Stderr:{} + +""" + return template.format(self.timeout, command, stdout, stderr) + + +class AuthFailure(Failure): + """ + An authentication failure, e.g. due to an incorrect ``sudo`` password. + + .. note:: + `.Result` objects attached to these exceptions typically lack exit code + information, since the command was never fully executed - the exception + was raised instead. + + .. versionadded:: 1.0 + """ + + def __init__(self, result: "Result", prompt: str) -> None: + self.result = result + self.prompt = prompt + + def __str__(self) -> str: + err = "The password submitted to prompt {!r} was rejected." + return err.format(self.prompt) + + +class ParseError(Exception): + """ + An error arising from the parsing of command-line flags/arguments. + + Ambiguous input, invalid task names, invalid flags, etc. + + .. versionadded:: 1.0 + """ + + def __init__( + self, msg: str, context: Optional["ParserContext"] = None + ) -> None: + super().__init__(msg) + self.context = context + + +class Exit(Exception): + """ + Simple custom stand-in for SystemExit. + + Replaces scattered sys.exit calls, improves testability, allows one to + catch an exit request without intercepting real SystemExits (typically an + unfriendly thing to do, as most users calling `sys.exit` rather expect it + to truly exit.) + + Defaults to a non-printing, exit-0 friendly termination behavior if the + exception is uncaught. + + If ``code`` (an int) given, that code is used to exit. + + If ``message`` (a string) given, it is printed to standard error, and the + program exits with code ``1`` by default (unless overridden by also giving + ``code`` explicitly.) + + .. 
versionadded:: 1.0 + """ + + def __init__( + self, message: Optional[str] = None, code: Optional[int] = None + ) -> None: + self.message = message + self._code = code + + @property + def code(self) -> int: + if self._code is not None: + return self._code + return 1 if self.message else 0 + + +class PlatformError(Exception): + """ + Raised when an illegal operation occurs for the current platform. + + E.g. Windows users trying to use functionality requiring the ``pty`` + module. + + Typically used to present a clearer error message to the user. + + .. versionadded:: 1.0 + """ + + pass + + +class AmbiguousEnvVar(Exception): + """ + Raised when loading env var config keys has an ambiguous target. + + .. versionadded:: 1.0 + """ + + pass + + +class UncastableEnvVar(Exception): + """ + Raised on attempted env var loads whose default values are too rich. + + E.g. trying to stuff ``MY_VAR="foo"`` into ``{'my_var': ['uh', 'oh']}`` + doesn't make any sense until/if we implement some sort of transform option. + + .. versionadded:: 1.0 + """ + + pass + + +class UnknownFileType(Exception): + """ + A config file of an unknown type was specified and cannot be loaded. + + .. versionadded:: 1.0 + """ + + pass + + +class UnpicklableConfigMember(Exception): + """ + A config file contained module objects, which can't be pickled/copied. + + We raise this more easily catchable exception instead of letting the + (unclearly phrased) TypeError bubble out of the pickle module. (However, to + avoid our own fragile catching of that error, we head it off by explicitly + testing for module members.) + + .. versionadded:: 1.0.2 + """ + + pass + + +def _printable_kwargs(kwargs: Any) -> Dict[str, Any]: + """ + Return print-friendly version of a thread-related ``kwargs`` dict. + + Extra care is taken with ``args`` members which are very long iterables - + those need truncating to be useful. + """ + printable = {} + for key, value in kwargs.items(): + item = value + if key == "args": + item = [] + for arg in value: + new_arg = arg + if hasattr(arg, "__len__") and len(arg) > 10: + msg = "<... remainder truncated during error display ...>" + new_arg = arg[:10] + [msg] + item.append(new_arg) + printable[key] = item + return printable + + +class ThreadException(Exception): + """ + One or more exceptions were raised within background threads. + + The real underlying exceptions are stored in the `exceptions` attribute; + see its documentation for data structure details. + + .. note:: + Threads which did not encounter an exception, do not contribute to this + exception object and thus are not present inside `exceptions`. + + .. versionadded:: 1.0 + """ + + #: A tuple of `ExceptionWrappers ` containing + #: the initial thread constructor kwargs (because `threading.Thread` + #: subclasses should always be called with kwargs) and the caught exception + #: for that thread as seen by `sys.exc_info` (so: type, value, traceback). + #: + #: .. note:: + #: The ordering of this attribute is not well-defined. + #: + #: .. note:: + #: Thread kwargs which appear to be very long (e.g. IO + #: buffers) will be truncated when printed, to avoid huge + #: unreadable error display. + exceptions: Tuple["ExceptionWrapper", ...] 
= tuple() + + def __init__(self, exceptions: List["ExceptionWrapper"]) -> None: + self.exceptions = tuple(exceptions) + + def __str__(self) -> str: + details = [] + for x in self.exceptions: + # Build useful display + detail = "Thread args: {}\n\n{}" + details.append( + detail.format( + pformat(_printable_kwargs(x.kwargs)), + "\n".join(format_exception(x.type, x.value, x.traceback)), + ) + ) + args = ( + len(self.exceptions), + ", ".join(x.type.__name__ for x in self.exceptions), + "\n\n".join(details), + ) + return """ +Saw {} exceptions within threads ({}): + + +{} +""".format( + *args + ) + + +class WatcherError(Exception): + """ + Generic parent exception class for `.StreamWatcher`-related errors. + + Typically, one of these exceptions indicates a `.StreamWatcher` noticed + something anomalous in an output stream, such as an authentication response + failure. + + `.Runner` catches these and attaches them to `.Failure` exceptions so they + can be referenced by intermediate code and/or act as extra info for end + users. + + .. versionadded:: 1.0 + """ + + pass + + +class ResponseNotAccepted(WatcherError): + """ + A responder/watcher class noticed a 'bad' response to its submission. + + Mostly used by `.FailingResponder` and subclasses, e.g. "oh dear I + autosubmitted a sudo password and it was incorrect." + + .. versionadded:: 1.0 + """ + + pass + + +class SubprocessPipeError(Exception): + """ + Some problem was encountered handling subprocess pipes (stdout/err/in). + + Typically only for corner cases; most of the time, errors in this area are + raised by the interpreter or the operating system, and end up wrapped in a + `.ThreadException`. + + .. versionadded:: 1.3 + """ + + pass diff --git a/.venv/lib/python3.9/site-packages/invoke/executor.py b/.venv/lib/python3.9/site-packages/invoke/executor.py new file mode 100644 index 0000000..08aa74e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/executor.py @@ -0,0 +1,229 @@ +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union + +from .config import Config +from .parser import ParserContext +from .util import debug +from .tasks import Call, Task + +if TYPE_CHECKING: + from .collection import Collection + from .runners import Result + from .parser import ParseResult + + +class Executor: + """ + An execution strategy for Task objects. + + Subclasses may override various extension points to change, add or remove + behavior. + + .. versionadded:: 1.0 + """ + + def __init__( + self, + collection: "Collection", + config: Optional["Config"] = None, + core: Optional["ParseResult"] = None, + ) -> None: + """ + Initialize executor with handles to necessary data structures. + + :param collection: + A `.Collection` used to look up requested tasks (and their default + config data, if any) by name during execution. + + :param config: + An optional `.Config` holding configuration state. Defaults to an + empty `.Config` if not given. + + :param core: + An optional `.ParseResult` holding parsed core program arguments. + Defaults to ``None``. + """ + self.collection = collection + self.config = config if config is not None else Config() + self.core = core + + def execute( + self, *tasks: Union[str, Tuple[str, Dict[str, Any]], ParserContext] + ) -> Dict["Task", "Result"]: + """ + Execute one or more ``tasks`` in sequence. + + :param tasks: + An all-purpose iterable of "tasks to execute", each member of which + may take one of the following forms: + + **A string** naming a task from the Executor's `.Collection`. 
This + name may contain dotted syntax appropriate for calling namespaced + tasks, e.g. ``subcollection.taskname``. Such tasks are executed + without arguments. + + **A two-tuple** whose first element is a task name string (as + above) and whose second element is a dict suitable for use as + ``**kwargs`` when calling the named task. E.g.:: + + [ + ('task1', {}), + ('task2', {'arg1': 'val1'}), + ... + ] + + is equivalent, roughly, to:: + + task1() + task2(arg1='val1') + + **A `.ParserContext`** instance, whose ``.name`` attribute is used + as the task name and whose ``.as_kwargs`` attribute is used as the + task kwargs (again following the above specifications). + + .. note:: + When called without any arguments at all (i.e. when ``*tasks`` + is empty), the default task from ``self.collection`` is used + instead, if defined. + + :returns: + A dict mapping task objects to their return values. + + This dict may include pre- and post-tasks if any were executed. For + example, in a collection with a ``build`` task depending on another + task named ``setup``, executing ``build`` will result in a dict + with two keys, one for ``build`` and one for ``setup``. + + .. versionadded:: 1.0 + """ + # Normalize input + debug("Examining top level tasks {!r}".format([x for x in tasks])) + calls = self.normalize(tasks) + debug("Tasks (now Calls) with kwargs: {!r}".format(calls)) + # Obtain copy of directly-given tasks since they should sometimes + # behave differently + direct = list(calls) + # Expand pre/post tasks + # TODO: may make sense to bundle expansion & deduping now eh? + expanded = self.expand_calls(calls) + # Get some good value for dedupe option, even if config doesn't have + # the tree we expect. (This is a concession to testing.) + try: + dedupe = self.config.tasks.dedupe + except AttributeError: + dedupe = True + # Dedupe across entire run now that we know about all calls in order + calls = self.dedupe(expanded) if dedupe else expanded + # Execute + results = {} + # TODO: maybe clone initial config here? Probably not necessary, + # especially given Executor is not designed to execute() >1 time at the + # moment... + for call in calls: + autoprint = call in direct and call.autoprint + debug("Executing {!r}".format(call)) + # Hand in reference to our config, which will preserve user + # modifications across the lifetime of the session. + config = self.config + # But make sure we reset its task-sensitive levels each time + # (collection & shell env) + # TODO: load_collection needs to be skipped if task is anonymous + # (Fabric 2 or other subclassing libs only) + collection_config = self.collection.configuration(call.called_as) + config.load_collection(collection_config) + config.load_shell_env() + debug("Finished loading collection & shell env configs") + # Get final context from the Call (which will know how to generate + # an appropriate one; e.g. subclasses might use extra data from + # being parameterized), handing in this config for use there. + context = call.make_context(config) + args = (context, *call.args) + result = call.task(*args, **call.kwargs) + if autoprint: + print(result) + # TODO: handle the non-dedupe case / the same-task-different-args + # case, wherein one task obj maps to >1 result. + results[call.task] = result + return results + + def normalize( + self, + tasks: Tuple[ + Union[str, Tuple[str, Dict[str, Any]], ParserContext], ... + ], + ) -> List["Call"]: + """ + Transform arbitrary task list w/ various types, into `.Call` objects. 
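For illustration only, a minimal sketch of the call forms described in the ``execute`` docstring above, assuming two invented tasks named ``test`` and ``build`` registered on a collection:

from invoke import Collection, Executor, task

@task
def test(c):
    return "tested"

@task
def build(c, clean=False):
    return "built (clean={})".format(clean)

ns = Collection(test, build)
executor = Executor(ns)

# A bare task name plus a (name, kwargs) two-tuple, as accepted by execute().
results = executor.execute("test", ("build", {"clean": True}))
print(results[ns["build"]])  # -> built (clean=True)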
+ + See docstring for `~.Executor.execute` for details. + + .. versionadded:: 1.0 + """ + calls = [] + for task in tasks: + name: Optional[str] + if isinstance(task, str): + name = task + kwargs = {} + elif isinstance(task, ParserContext): + name = task.name + kwargs = task.as_kwargs + else: + name, kwargs = task + c = Call(self.collection[name], kwargs=kwargs, called_as=name) + calls.append(c) + if not tasks and self.collection.default is not None: + calls = [Call(self.collection[self.collection.default])] + return calls + + def dedupe(self, calls: List["Call"]) -> List["Call"]: + """ + Deduplicate a list of `tasks <.Call>`. + + :param calls: An iterable of `.Call` objects representing tasks. + + :returns: A list of `.Call` objects. + + .. versionadded:: 1.0 + """ + deduped = [] + debug("Deduplicating tasks...") + for call in calls: + if call not in deduped: + debug("{!r}: no duplicates found, ok".format(call)) + deduped.append(call) + else: + debug("{!r}: found in list already, skipping".format(call)) + return deduped + + def expand_calls(self, calls: List["Call"]) -> List["Call"]: + """ + Expand a list of `.Call` objects into a near-final list of same. + + The default implementation of this method simply adds a task's + pre/post-task list before/after the task itself, as necessary. + + Subclasses may wish to do other things in addition (or instead of) the + above, such as multiplying the `calls <.Call>` by argument vectors or + similar. + + .. versionadded:: 1.0 + """ + ret = [] + for call in calls: + # Normalize to Call (this method is sometimes called with pre/post + # task lists, which may contain 'raw' Task objects) + if isinstance(call, Task): + call = Call(call) + debug("Expanding task-call {!r}".format(call)) + # TODO: this is where we _used_ to call Executor.config_for(call, + # config)... + # TODO: now we may need to preserve more info like where the call + # came from, etc, but I feel like that shit should go _on the call + # itself_ right??? + # TODO: we _probably_ don't even want the config in here anymore, + # we want this to _just_ be about the recursion across pre/post + # tasks or parameterization...? + ret.extend(self.expand_calls(call.pre)) + ret.append(call) + ret.extend(self.expand_calls(call.post)) + return ret diff --git a/.venv/lib/python3.9/site-packages/invoke/loader.py b/.venv/lib/python3.9/site-packages/invoke/loader.py new file mode 100644 index 0000000..801d163 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/loader.py @@ -0,0 +1,154 @@ +import os +import sys +from importlib.machinery import ModuleSpec +from importlib.util import module_from_spec, spec_from_file_location +from pathlib import Path +from types import ModuleType +from typing import Any, Optional, Tuple + +from . import Config +from .exceptions import CollectionNotFound +from .util import debug + + +class Loader: + """ + Abstract class defining how to find/import a session's base `.Collection`. + + .. versionadded:: 1.0 + """ + + def __init__(self, config: Optional["Config"] = None) -> None: + """ + Set up a new loader with some `.Config`. + + :param config: + An explicit `.Config` to use; it is referenced for loading-related + config options. Defaults to an anonymous ``Config()`` if none is + given. + """ + if config is None: + config = Config() + self.config = config + + def find(self, name: str) -> Optional[ModuleSpec]: + """ + Implementation-specific finder method seeking collection ``name``. 
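As a hedged illustration of the ``find``/``load`` split described here (not part of invoke itself), a custom loader could return a spec pointing at a hard-coded, made-up directory; the ``load`` method shown below then imports whatever ``find`` located:

from importlib.machinery import ModuleSpec
from importlib.util import spec_from_file_location
from typing import Optional

from invoke.loader import Loader


class FixedPathLoader(Loader):
    # Always look for '<name>.py' inside one hypothetical directory.
    def find(self, name: str) -> Optional[ModuleSpec]:
        return spec_from_file_location(name, "/opt/mytasks/{}.py".format(name))


# module, directory = FixedPathLoader().load("tasks")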
+ + Must return a ModuleSpec valid for use by `importlib`, which is + typically a name string followed by the contents of the 3-tuple + returned by `importlib.module_from_spec` (``name``, ``loader``, + ``origin``.) + + For a sample implementation, see `.FilesystemLoader`. + + .. versionadded:: 1.0 + """ + raise NotImplementedError + + def load(self, name: Optional[str] = None) -> Tuple[ModuleType, str]: + """ + Load and return collection module identified by ``name``. + + This method requires a working implementation of `.find` in order to + function. + + In addition to importing the named module, it will add the module's + parent directory to the front of `sys.path` to provide normal Python + import behavior (i.e. so the loaded module may load local-to-it modules + or packages.) + + :returns: + Two-tuple of ``(module, directory)`` where ``module`` is the + collection-containing Python module object, and ``directory`` is + the string path to the directory the module was found in. + + .. versionadded:: 1.0 + """ + if name is None: + name = self.config.tasks.collection_name + spec = self.find(name) + if spec and spec.loader and spec.origin: + # Typically either tasks.py or tasks/__init__.py + source_file = Path(spec.origin) + # Will be 'the dir tasks.py is in', or 'tasks/', in both cases this + # is what wants to be in sys.path for "from . import sibling" + enclosing_dir = source_file.parent + # Will be "the directory above the spot that 'import tasks' found", + # namely the parent of "your task tree", i.e. "where project level + # config files are looked for". So, same as enclosing_dir for + # tasks.py, but one more level up for tasks/__init__.py... + module_parent = enclosing_dir + if spec.parent: # it's a package, so we have to go up again + module_parent = module_parent.parent + # Get the enclosing dir on the path + enclosing_str = str(enclosing_dir) + if enclosing_str not in sys.path: + sys.path.insert(0, enclosing_str) + # Actual import + module = module_from_spec(spec) + sys.modules[spec.name] = module # so 'from . import xxx' works + spec.loader.exec_module(module) + # Return the module and the folder it was found in + return module, str(module_parent) + msg = "ImportError loading {!r}, raising ImportError" + debug(msg.format(name)) + raise ImportError + + +class FilesystemLoader(Loader): + """ + Loads Python files from the filesystem (e.g. ``tasks.py``.) + + Searches recursively towards filesystem root from a given start point. + + .. 
versionadded:: 1.0 + """ + + # TODO: could introduce config obj here for transmission to Collection + # TODO: otherwise Loader has to know about specific bits to transmit, such + # as auto-dashes, and has to grow one of those for every bit Collection + # ever needs to know + def __init__(self, start: Optional[str] = None, **kwargs: Any) -> None: + super().__init__(**kwargs) + if start is None: + start = self.config.tasks.search_root + self._start = start + + @property + def start(self) -> str: + # Lazily determine default CWD if configured value is falsey + return self._start or os.getcwd() + + def find(self, name: str) -> Optional[ModuleSpec]: + debug("FilesystemLoader find starting at {!r}".format(self.start)) + spec = None + module = "{}.py".format(name) + paths = self.start.split(os.sep) + try: + # walk the path upwards to check for dynamic import + for x in reversed(range(len(paths) + 1)): + path = os.sep.join(paths[0:x]) + if module in os.listdir(path): + spec = spec_from_file_location( + name, os.path.join(path, module) + ) + break + elif name in os.listdir(path) and os.path.exists( + os.path.join(path, name, "__init__.py") + ): + basepath = os.path.join(path, name) + spec = spec_from_file_location( + name, + os.path.join(basepath, "__init__.py"), + submodule_search_locations=[basepath], + ) + break + if spec: + debug("Found module: {!r}".format(spec)) + return spec + except (FileNotFoundError, ModuleNotFoundError): + msg = "ImportError loading {!r}, raising CollectionNotFound" + debug(msg.format(name)) + raise CollectionNotFound(name=name, start=self.start) + return None diff --git a/.venv/lib/python3.9/site-packages/invoke/main.py b/.venv/lib/python3.9/site-packages/invoke/main.py new file mode 100644 index 0000000..3576b5a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/main.py @@ -0,0 +1,14 @@ +""" +Invoke's own 'binary' entrypoint. + +Dogfoods the `program` module. +""" + +from . 
import __version__, Program + +program = Program( + name="Invoke", + binary="inv[oke]", + binary_names=["invoke", "inv"], + version=__version__, +) diff --git a/.venv/lib/python3.9/site-packages/invoke/parser/__init__.py b/.venv/lib/python3.9/site-packages/invoke/parser/__init__.py new file mode 100644 index 0000000..02aa026 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/parser/__init__.py @@ -0,0 +1,5 @@ +# flake8: noqa +from .parser import * +from .context import ParserContext +from .context import ParserContext as Context, to_flag, translate_underscores +from .argument import Argument diff --git a/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..dd1bc76 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/argument.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/argument.cpython-39.pyc new file mode 100644 index 0000000..a97e547 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/argument.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/context.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/context.cpython-39.pyc new file mode 100644 index 0000000..e5c8a7c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/context.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/parser.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/parser.cpython-39.pyc new file mode 100644 index 0000000..8846dd1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/parser/__pycache__/parser.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/parser/argument.py b/.venv/lib/python3.9/site-packages/invoke/parser/argument.py new file mode 100644 index 0000000..761eb60 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/parser/argument.py @@ -0,0 +1,178 @@ +from typing import Any, Iterable, Optional, Tuple + +# TODO: dynamic type for kind +# T = TypeVar('T') + + +class Argument: + """ + A command-line argument/flag. + + :param name: + Syntactic sugar for ``names=[]``. Giving both ``name`` and + ``names`` is invalid. + :param names: + List of valid identifiers for this argument. For example, a "help" + argument may be defined with a name list of ``['-h', '--help']``. + :param kind: + Type factory & parser hint. E.g. ``int`` will turn the default text + value parsed, into a Python integer; and ``bool`` will tell the + parser not to expect an actual value but to treat the argument as a + toggle/flag. + :param default: + Default value made available to the parser if no value is given on the + command line. + :param help: + Help text, intended for use with ``--help``. + :param positional: + Whether or not this argument's value may be given positionally. When + ``False`` (default) arguments must be explicitly named. + :param optional: + Whether or not this (non-``bool``) argument requires a value. + :param incrementable: + Whether or not this (``int``) argument is to be incremented instead of + overwritten/assigned to. 
+ :param attr_name: + A Python identifier/attribute friendly name, typically filled in with + the underscored version when ``name``/``names`` contain dashes. + + .. versionadded:: 1.0 + """ + + def __init__( + self, + name: Optional[str] = None, + names: Iterable[str] = (), + kind: Any = str, + default: Optional[Any] = None, + help: Optional[str] = None, + positional: bool = False, + optional: bool = False, + incrementable: bool = False, + attr_name: Optional[str] = None, + ) -> None: + if name and names: + raise TypeError( + "Cannot give both 'name' and 'names' arguments! Pick one." + ) + if not (name or names): + raise TypeError("An Argument must have at least one name.") + if names: + self.names = tuple(names) + elif name and not names: + self.names = (name,) + self.kind = kind + initial_value: Optional[Any] = None + # Special case: list-type args start out as empty list, not None. + if kind is list: + initial_value = [] + # Another: incrementable args start out as their default value. + if incrementable: + initial_value = default + self.raw_value = self._value = initial_value + self.default = default + self.help = help + self.positional = positional + self.optional = optional + self.incrementable = incrementable + self.attr_name = attr_name + + def __repr__(self) -> str: + nicks = "" + if self.nicknames: + nicks = " ({})".format(", ".join(self.nicknames)) + flags = "" + if self.positional or self.optional: + flags = " " + if self.positional: + flags += "*" + if self.optional: + flags += "?" + # TODO: store this default value somewhere other than signature of + # Argument.__init__? + kind = "" + if self.kind != str: + kind = " [{}]".format(self.kind.__name__) + return "<{}: {}{}{}{}>".format( + self.__class__.__name__, self.name, nicks, kind, flags + ) + + @property + def name(self) -> Optional[str]: + """ + The canonical attribute-friendly name for this argument. + + Will be ``attr_name`` (if given to constructor) or the first name in + ``names`` otherwise. + + .. versionadded:: 1.0 + """ + return self.attr_name or self.names[0] + + @property + def nicknames(self) -> Tuple[str, ...]: + return self.names[1:] + + @property + def takes_value(self) -> bool: + if self.kind is bool: + return False + if self.incrementable: + return False + return True + + @property + def value(self) -> Any: + # TODO: should probably be optional instead + return self._value if self._value is not None else self.default + + @value.setter + def value(self, arg: str) -> None: + self.set_value(arg, cast=True) + + def set_value(self, value: Any, cast: bool = True) -> None: + """ + Actual explicit value-setting API call. + + Sets ``self.raw_value`` to ``value`` directly. + + Sets ``self.value`` to ``self.kind(value)``, unless: + + - ``cast=False``, in which case the raw value is also used. + - ``self.kind==list``, in which case the value is appended to + ``self.value`` instead of cast & overwritten. + - ``self.incrementable==True``, in which case the value is ignored and + the current (assumed int) value is simply incremented. + + .. versionadded:: 1.0 + """ + self.raw_value = value + # Default to do-nothing/identity function + func = lambda x: x + # If cast, set to self.kind, which should be str/int/etc + if cast: + func = self.kind + # If self.kind is a list, append instead of using cast func. + if self.kind is list: + func = lambda x: self.value + [x] + # If incrementable, just increment. 
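For illustration, the casting rules spelled out in the ``set_value`` docstring above play out roughly like this (the argument names are invented):

from invoke.parser import Argument

verbose = Argument(name="verbose", kind=int, default=0, incrementable=True)
verbose.value = "anything"   # incrementable: given value is ignored, count bumps
verbose.value = "anything"
print(verbose.value)         # -> 2

include = Argument(name="include", kind=list)
include.value = "docs"       # list kind: values append rather than overwrite
include.value = "tests"
print(include.value)         # -> ['docs', 'tests']

port = Argument(name="port", kind=int)
port.set_value("8080")       # cast=True (the default) runs the value through kind()
print(port.value)            # -> 8080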
+ if self.incrementable: + # TODO: explode nicely if self.value was not an int to start + # with + func = lambda x: self.value + 1 + self._value = func(value) + + @property + def got_value(self) -> bool: + """ + Returns whether the argument was ever given a (non-default) value. + + For most argument kinds, this simply checks whether the internally + stored value is non-``None``; for others, such as ``list`` kinds, + different checks may be used. + + .. versionadded:: 1.3 + """ + if self.kind is list: + return bool(self._value) + return self._value is not None diff --git a/.venv/lib/python3.9/site-packages/invoke/parser/context.py b/.venv/lib/python3.9/site-packages/invoke/parser/context.py new file mode 100644 index 0000000..359e9f9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/parser/context.py @@ -0,0 +1,266 @@ +import itertools +from typing import Any, Dict, List, Iterable, Optional, Tuple, Union + +try: + from ..vendor.lexicon import Lexicon +except ImportError: + from lexicon import Lexicon # type: ignore[no-redef] + +from .argument import Argument + + +def translate_underscores(name: str) -> str: + return name.lstrip("_").rstrip("_").replace("_", "-") + + +def to_flag(name: str) -> str: + name = translate_underscores(name) + if len(name) == 1: + return "-" + name + return "--" + name + + +def sort_candidate(arg: Argument) -> str: + names = arg.names + # TODO: is there no "split into two buckets on predicate" builtin? + shorts = {x for x in names if len(x.strip("-")) == 1} + longs = {x for x in names if x not in shorts} + return str(sorted(shorts if shorts else longs)[0]) + + +def flag_key(arg: Argument) -> List[Union[int, str]]: + """ + Obtain useful key list-of-ints for sorting CLI flags. + + .. versionadded:: 1.0 + """ + # Setup + ret: List[Union[int, str]] = [] + x = sort_candidate(arg) + # Long-style flags win over short-style ones, so the first item of + # comparison is simply whether the flag is a single character long (with + # non-length-1 flags coming "first" [lower number]) + ret.append(1 if len(x) == 1 else 0) + # Next item of comparison is simply the strings themselves, + # case-insensitive. They will compare alphabetically if compared at this + # stage. + ret.append(x.lower()) + # Finally, if the case-insensitive test also matched, compare + # case-sensitive, but inverse (with lowercase letters coming first) + inversed = "" + for char in x: + inversed += char.lower() if char.isupper() else char.upper() + ret.append(inversed) + return ret + + +# Named slightly more verbose so Sphinx references can be unambiguous. +# Got real sick of fully qualified paths. +class ParserContext: + """ + Parsing context with knowledge of flags & their format. + + Generally associated with the core program or a task. + + When run through a parser, will also hold runtime values filled in by the + parser. + + .. versionadded:: 1.0 + """ + + def __init__( + self, + name: Optional[str] = None, + aliases: Iterable[str] = (), + args: Iterable[Argument] = (), + ) -> None: + """ + Create a new ``ParserContext`` named ``name``, with ``aliases``. + + ``name`` is optional, and should be a string if given. It's used to + tell ParserContext objects apart, and for use in a Parser when + determining what chunk of input might belong to a given ParserContext. + + ``aliases`` is also optional and should be an iterable containing + strings. Parsing will honor any aliases when trying to "find" a given + context in its input. 
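For illustration, a small hand-built context using the pieces above (and the ``args`` shortcut described just below); the ``mytask`` name and its flags are invented:

from invoke.parser import Argument, ParserContext

ctx = ParserContext(
    name="mytask",
    aliases=("mt",),
    args=(
        Argument(names=("queue", "q"), default="celery"),
        Argument(name="verbose", kind=bool, default=False),
    ),
)
print(ctx.flags["-q"].name)  # nicknames alias back to the main flag -> 'queue'
print(ctx.as_kwargs)         # -> {'queue': 'celery', 'verbose': False}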
+ + May give one or more ``args``, which is a quick alternative to calling + ``for arg in args: self.add_arg(arg)`` after initialization. + """ + self.args = Lexicon() + self.positional_args: List[Argument] = [] + self.flags = Lexicon() + self.inverse_flags: Dict[str, str] = {} # No need for Lexicon here + self.name = name + self.aliases = aliases + for arg in args: + self.add_arg(arg) + + def __repr__(self) -> str: + aliases = "" + if self.aliases: + aliases = " ({})".format(", ".join(self.aliases)) + name = (" {!r}{}".format(self.name, aliases)) if self.name else "" + args = (": {!r}".format(self.args)) if self.args else "" + return "".format(name, args) + + def add_arg(self, *args: Any, **kwargs: Any) -> None: + """ + Adds given ``Argument`` (or constructor args for one) to this context. + + The Argument in question is added to the following dict attributes: + + * ``args``: "normal" access, i.e. the given names are directly exposed + as keys. + * ``flags``: "flaglike" access, i.e. the given names are translated + into CLI flags, e.g. ``"foo"`` is accessible via ``flags['--foo']``. + * ``inverse_flags``: similar to ``flags`` but containing only the + "inverse" versions of boolean flags which default to True. This + allows the parser to track e.g. ``--no-myflag`` and turn it into a + False value for the ``myflag`` Argument. + + .. versionadded:: 1.0 + """ + # Normalize + if len(args) == 1 and isinstance(args[0], Argument): + arg = args[0] + else: + arg = Argument(*args, **kwargs) + # Uniqueness constraint: no name collisions + for name in arg.names: + if name in self.args: + msg = "Tried to add an argument named {!r} but one already exists!" # noqa + raise ValueError(msg.format(name)) + # First name used as "main" name for purposes of aliasing + main = arg.names[0] # NOT arg.name + self.args[main] = arg + # Note positionals in distinct, ordered list attribute + if arg.positional: + self.positional_args.append(arg) + # Add names & nicknames to flags, args + self.flags[to_flag(main)] = arg + for name in arg.nicknames: + self.args.alias(name, to=main) + self.flags.alias(to_flag(name), to=to_flag(main)) + # Add attr_name to args, but not flags + if arg.attr_name: + self.args.alias(arg.attr_name, to=main) + # Add to inverse_flags if required + if arg.kind == bool and arg.default is True: + # Invert the 'main' flag name here, which will be a dashed version + # of the primary argument name if underscore-to-dash transformation + # occurred. + inverse_name = to_flag("no-{}".format(main)) + self.inverse_flags[inverse_name] = to_flag(main) + + @property + def missing_positional_args(self) -> List[Argument]: + return [x for x in self.positional_args if x.value is None] + + @property + def as_kwargs(self) -> Dict[str, Any]: + """ + This context's arguments' values keyed by their ``.name`` attribute. + + Results in a dict suitable for use in Python contexts, where e.g. an + arg named ``foo-bar`` becomes accessible as ``foo_bar``. + + .. versionadded:: 1.0 + """ + ret = {} + for arg in self.args.values(): + ret[arg.name] = arg.value + return ret + + def names_for(self, flag: str) -> List[str]: + # TODO: should probably be a method on Lexicon/AliasDict + return list(set([flag] + self.flags.aliases_of(flag))) + + def help_for(self, flag: str) -> Tuple[str, str]: + """ + Return 2-tuple of ``(flag-spec, help-string)`` for given ``flag``. + + .. versionadded:: 1.0 + """ + # Obtain arg obj + if flag not in self.flags: + err = "{!r} is not a valid flag for this context! 
Valid flags are: {!r}" # noqa + raise ValueError(err.format(flag, self.flags.keys())) + arg = self.flags[flag] + # Determine expected value type, if any + value = {str: "STRING", int: "INT"}.get(arg.kind) + # Format & go + full_names = [] + for name in self.names_for(flag): + if value: + # Short flags are -f VAL, long are --foo=VAL + # When optional, also, -f [VAL] and --foo[=VAL] + if len(name.strip("-")) == 1: + value_ = ("[{}]".format(value)) if arg.optional else value + valuestr = " {}".format(value_) + else: + valuestr = "={}".format(value) + if arg.optional: + valuestr = "[{}]".format(valuestr) + else: + # no value => boolean + # check for inverse + if name in self.inverse_flags.values(): + name = "--[no-]{}".format(name[2:]) + + valuestr = "" + # Tack together + full_names.append(name + valuestr) + namestr = ", ".join(sorted(full_names, key=len)) + helpstr = arg.help or "" + return namestr, helpstr + + def help_tuples(self) -> List[Tuple[str, Optional[str]]]: + """ + Return sorted iterable of help tuples for all member Arguments. + + Sorts like so: + + * General sort is alphanumerically + * Short flags win over long flags + * Arguments with *only* long flags and *no* short flags will come + first. + * When an Argument has multiple long or short flags, it will sort using + the most favorable (lowest alphabetically) candidate. + + This will result in a help list like so:: + + --alpha, --zeta # 'alpha' wins + --beta + -a, --query # short flag wins + -b, --argh + -c + + .. versionadded:: 1.0 + """ + # TODO: argument/flag API must change :( + # having to call to_flag on 1st name of an Argument is just dumb. + # To pass in an Argument object to help_for may require moderate + # changes? + return list( + map( + lambda x: self.help_for(to_flag(x.name)), + sorted(self.flags.values(), key=flag_key), + ) + ) + + def flag_names(self) -> Tuple[str, ...]: + """ + Similar to `help_tuples` but returns flag names only, no helpstrs. + + Specifically, all flag names, flattened, in rough order. + + .. versionadded:: 1.0 + """ + # Regular flag names + flags = sorted(self.flags.values(), key=flag_key) + names = [self.names_for(to_flag(x.name)) for x in flags] + # Inverse flag names sold separately + names.append(list(self.inverse_flags.keys())) + return tuple(itertools.chain.from_iterable(names)) diff --git a/.venv/lib/python3.9/site-packages/invoke/parser/parser.py b/.venv/lib/python3.9/site-packages/invoke/parser/parser.py new file mode 100644 index 0000000..43e95df --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/parser/parser.py @@ -0,0 +1,455 @@ +import copy +from typing import TYPE_CHECKING, Any, Iterable, List, Optional + +try: + from ..vendor.lexicon import Lexicon + from ..vendor.fluidity import StateMachine, state, transition +except ImportError: + from lexicon import Lexicon # type: ignore[no-redef] + from fluidity import ( # type: ignore[no-redef] + StateMachine, + state, + transition, + ) + +from ..exceptions import ParseError +from ..util import debug + +if TYPE_CHECKING: + from .context import ParserContext + + +def is_flag(value: str) -> bool: + return value.startswith("-") + + +def is_long_flag(value: str) -> bool: + return value.startswith("--") + + +class ParseResult(List["ParserContext"]): + """ + List-like object with some extra parse-related attributes. + + Specifically, a ``.remainder`` attribute, which is the string found after a + ``--`` in any parsed argv list; and an ``.unparsed`` attribute, a list of + tokens that were unable to be parsed. + + .. 
versionadded:: 1.0 + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.remainder = "" + self.unparsed: List[str] = [] + + +class Parser: + """ + Create parser conscious of ``contexts`` and optional ``initial`` context. + + ``contexts`` should be an iterable of ``Context`` instances which will be + searched when new context names are encountered during a parse. These + Contexts determine what flags may follow them, as well as whether given + flags take values. + + ``initial`` is optional and will be used to determine validity of "core" + options/flags at the start of the parse run, if any are encountered. + + ``ignore_unknown`` determines what to do when contexts are found which do + not map to any members of ``contexts``. By default it is ``False``, meaning + any unknown contexts result in a parse error exception. If ``True``, + encountering an unknown context halts parsing and populates the return + value's ``.unparsed`` attribute with the remaining parse tokens. + + .. versionadded:: 1.0 + """ + + def __init__( + self, + contexts: Iterable["ParserContext"] = (), + initial: Optional["ParserContext"] = None, + ignore_unknown: bool = False, + ) -> None: + self.initial = initial + self.contexts = Lexicon() + self.ignore_unknown = ignore_unknown + for context in contexts: + debug("Adding {}".format(context)) + if not context.name: + raise ValueError("Non-initial contexts must have names.") + exists = "A context named/aliased {!r} is already in this parser!" + if context.name in self.contexts: + raise ValueError(exists.format(context.name)) + self.contexts[context.name] = context + for alias in context.aliases: + if alias in self.contexts: + raise ValueError(exists.format(alias)) + self.contexts.alias(alias, to=context.name) + + def parse_argv(self, argv: List[str]) -> ParseResult: + """ + Parse an argv-style token list ``argv``. + + Returns a list (actually a subclass, `.ParseResult`) of + `.ParserContext` objects matching the order they were found in the + ``argv`` and containing `.Argument` objects with updated values based + on any flags given. + + Assumes any program name has already been stripped out. Good:: + + Parser(...).parse_argv(['--core-opt', 'task', '--task-opt']) + + Bad:: + + Parser(...).parse_argv(['invoke', '--core-opt', ...]) + + :param argv: List of argument string tokens. + :returns: + A `.ParseResult` (a ``list`` subclass containing some number of + `.ParserContext` objects). + + .. versionadded:: 1.0 + """ + machine = ParseMachine( + # FIXME: initial should not be none + initial=self.initial, # type: ignore[arg-type] + contexts=self.contexts, + ignore_unknown=self.ignore_unknown, + ) + # FIXME: Why isn't there str.partition for lists? There must be a + # better way to do this. Split argv around the double-dash remainder + # sentinel. + debug("Starting argv: {!r}".format(argv)) + try: + ddash = argv.index("--") + except ValueError: + ddash = len(argv) # No remainder == body gets all + body = argv[:ddash] + remainder = argv[ddash:][1:] # [1:] to strip off remainder itself + if remainder: + debug( + "Remainder: argv[{!r}:][1:] => {!r}".format(ddash, remainder) + ) + for index, token in enumerate(body): + # Handle non-space-delimited forms, if not currently expecting a + # flag value and still in valid parsing territory (i.e. 
not in + # "unknown" state which implies store-only) + # NOTE: we do this in a few steps so we can + # split-then-check-validity; necessary for things like when the + # previously seen flag optionally takes a value. + mutations = [] + orig = token + if is_flag(token) and not machine.result.unparsed: + # Equals-sign-delimited flags, eg --foo=bar or -f=bar + if "=" in token: + token, _, value = token.partition("=") + msg = "Splitting x=y expr {!r} into tokens {!r} and {!r}" + debug(msg.format(orig, token, value)) + mutations.append((index + 1, value)) + # Contiguous boolean short flags, e.g. -qv + elif not is_long_flag(token) and len(token) > 2: + full_token = token[:] + rest, token = token[2:], token[:2] + err = "Splitting {!r} into token {!r} and rest {!r}" + debug(err.format(full_token, token, rest)) + # Handle boolean flag block vs short-flag + value. Make + # sure not to test the token as a context flag if we've + # passed into 'storing unknown stuff' territory (e.g. on a + # core-args pass, handling what are going to be task args) + have_flag = ( + token in machine.context.flags + and machine.current_state != "unknown" + ) + if have_flag and machine.context.flags[token].takes_value: + msg = "{!r} is a flag for current context & it takes a value, giving it {!r}" # noqa + debug(msg.format(token, rest)) + mutations.append((index + 1, rest)) + else: + _rest = ["-{}".format(x) for x in rest] + msg = "Splitting multi-flag glob {!r} into {!r} and {!r}" # noqa + debug(msg.format(orig, token, _rest)) + for item in reversed(_rest): + mutations.append((index + 1, item)) + # Here, we've got some possible mutations queued up, and 'token' + # may have been overwritten as well. Whether we apply those and + # continue as-is, or roll it back, depends: + # - If the parser wasn't waiting for a flag value, we're already on + # the right track, so apply mutations and move along to the + # handle() step. + # - If we ARE waiting for a value, and the flag expecting it ALWAYS + # wants a value (it's not optional), we go back to using the + # original token. (TODO: could reorganize this to avoid the + # sub-parsing in this case, but optimizing for human-facing + # execution isn't critical.) + # - Finally, if we are waiting for a value AND it's optional, we + # inspect the first sub-token/mutation to see if it would otherwise + # have been a valid flag, and let that determine what we do (if + # valid, we apply the mutations; if invalid, we reinstate the + # original token.) 
+ if machine.waiting_for_flag_value: + optional = machine.flag and machine.flag.optional + subtoken_is_valid_flag = token in machine.context.flags + if not (optional and subtoken_is_valid_flag): + token = orig + mutations = [] + for index, value in mutations: + body.insert(index, value) + machine.handle(token) + machine.finish() + result = machine.result + result.remainder = " ".join(remainder) + return result + + +class ParseMachine(StateMachine): + initial_state = "context" + + state("context", enter=["complete_flag", "complete_context"]) + state("unknown", enter=["complete_flag", "complete_context"]) + state("end", enter=["complete_flag", "complete_context"]) + + transition(from_=("context", "unknown"), event="finish", to="end") + transition( + from_="context", + event="see_context", + action="switch_to_context", + to="context", + ) + transition( + from_=("context", "unknown"), + event="see_unknown", + action="store_only", + to="unknown", + ) + + def changing_state(self, from_: str, to: str) -> None: + debug("ParseMachine: {!r} => {!r}".format(from_, to)) + + def __init__( + self, + initial: "ParserContext", + contexts: Lexicon, + ignore_unknown: bool, + ) -> None: + # Initialize + self.ignore_unknown = ignore_unknown + self.initial = self.context = copy.deepcopy(initial) + debug("Initialized with context: {!r}".format(self.context)) + self.flag = None + self.flag_got_value = False + self.result = ParseResult() + self.contexts = copy.deepcopy(contexts) + debug("Available contexts: {!r}".format(self.contexts)) + # In case StateMachine does anything in __init__ + super().__init__() + + @property + def waiting_for_flag_value(self) -> bool: + # Do we have a current flag, and does it expect a value (vs being a + # bool/toggle)? + takes_value = self.flag and self.flag.takes_value + if not takes_value: + return False + # OK, this flag is one that takes values. + # Is it a list type (which has only just been switched to)? Then it'll + # always accept more values. + # TODO: how to handle somebody wanting it to be some other iterable + # like tuple or custom class? Or do we just say unsupported? + if self.flag.kind is list and not self.flag_got_value: + return True + # Not a list, okay. Does it already have a value? + has_value = self.flag.raw_value is not None + # If it doesn't have one, we're waiting for one (which tells the parser + # how to proceed and typically to store the next token.) + # TODO: in the negative case here, we should do something else instead: + # - Except, "hey you screwed up, you already gave that flag!" + # - Overwrite, "oh you changed your mind?" - which requires more work + # elsewhere too, unfortunately. (Perhaps additional properties on + # Argument that can be queried, e.g. "arg.is_iterable"?) + return not has_value + + def handle(self, token: str) -> None: + debug("Handling token: {!r}".format(token)) + # Handle unknown state at the top: we don't care about even + # possibly-valid input if we've encountered unknown input. 
+ if self.current_state == "unknown": + debug("Top-of-handle() see_unknown({!r})".format(token)) + self.see_unknown(token) + return + # Flag + if self.context and token in self.context.flags: + debug("Saw flag {!r}".format(token)) + self.switch_to_flag(token) + elif self.context and token in self.context.inverse_flags: + debug("Saw inverse flag {!r}".format(token)) + self.switch_to_flag(token, inverse=True) + # Value for current flag + elif self.waiting_for_flag_value: + debug( + "We're waiting for a flag value so {!r} must be it?".format( + token + ) + ) # noqa + self.see_value(token) + # Positional args (must come above context-name check in case we still + # need a posarg and the user legitimately wants to give it a value that + # just happens to be a valid context name.) + elif self.context and self.context.missing_positional_args: + msg = "Context {!r} requires positional args, eating {!r}" + debug(msg.format(self.context, token)) + self.see_positional_arg(token) + # New context + elif token in self.contexts: + self.see_context(token) + # Initial-context flag being given as per-task flag (e.g. --help) + elif self.initial and token in self.initial.flags: + debug("Saw (initial-context) flag {!r}".format(token)) + flag = self.initial.flags[token] + # Special-case for core --help flag: context name is used as value. + if flag.name == "help": + flag.value = self.context.name + msg = "Saw --help in a per-task context, setting task name ({!r}) as its value" # noqa + debug(msg.format(flag.value)) + # All others: just enter the 'switch to flag' parser state + else: + # TODO: handle inverse core flags too? There are none at the + # moment (e.g. --no-dedupe is actually 'no_dedupe', not a + # default-False 'dedupe') and it's up to us whether we actually + # put any in place. + self.switch_to_flag(token) + # Unknown + else: + if not self.ignore_unknown: + debug("Can't find context named {!r}, erroring".format(token)) + self.error("No idea what {!r} is!".format(token)) + else: + debug("Bottom-of-handle() see_unknown({!r})".format(token)) + self.see_unknown(token) + + def store_only(self, token: str) -> None: + # Start off the unparsed list + debug("Storing unknown token {!r}".format(token)) + self.result.unparsed.append(token) + + def complete_context(self) -> None: + debug( + "Wrapping up context {!r}".format( + self.context.name if self.context else self.context + ) + ) + # Ensure all of context's positional args have been given. + if self.context and self.context.missing_positional_args: + err = "'{}' did not receive required positional arguments: {}" + names = ", ".join( + "'{}'".format(x.name) + for x in self.context.missing_positional_args + ) + self.error(err.format(self.context.name, names)) + if self.context and self.context not in self.result: + self.result.append(self.context) + + def switch_to_context(self, name: str) -> None: + self.context = copy.deepcopy(self.contexts[name]) + debug("Moving to context {!r}".format(name)) + debug("Context args: {!r}".format(self.context.args)) + debug("Context flags: {!r}".format(self.context.flags)) + debug("Context inverse_flags: {!r}".format(self.context.inverse_flags)) + + def complete_flag(self) -> None: + if self.flag: + msg = "Completing current flag {} before moving on" + debug(msg.format(self.flag)) + # Barf if we needed a value and didn't get one + if ( + self.flag + and self.flag.takes_value + and self.flag.raw_value is None + and not self.flag.optional + ): + err = "Flag {!r} needed value and was not given one!" 
+ self.error(err.format(self.flag)) + # Handle optional-value flags; at this point they were not given an + # explicit value, but they were seen, ergo they should get treated like + # bools. + if self.flag and self.flag.raw_value is None and self.flag.optional: + msg = "Saw optional flag {!r} go by w/ no value; setting to True" + debug(msg.format(self.flag.name)) + # Skip casting so the bool gets preserved + self.flag.set_value(True, cast=False) + + def check_ambiguity(self, value: Any) -> bool: + """ + Guard against ambiguity when current flag takes an optional value. + + .. versionadded:: 1.0 + """ + # No flag is currently being examined, or one is but it doesn't take an + # optional value? Ambiguity isn't possible. + if not (self.flag and self.flag.optional): + return False + # We *are* dealing with an optional-value flag, but it's already + # received a value? There can't be ambiguity here either. + if self.flag.raw_value is not None: + return False + # Otherwise, there *may* be ambiguity if 1 or more of the below tests + # fail. + tests = [] + # Unfilled posargs still exist? + tests.append(self.context and self.context.missing_positional_args) + # Value matches another valid task/context name? + tests.append(value in self.contexts) + if any(tests): + msg = "{!r} is ambiguous when given after an optional-value flag" + raise ParseError(msg.format(value)) + + def switch_to_flag(self, flag: str, inverse: bool = False) -> None: + # Sanity check for ambiguity w/ prior optional-value flag + self.check_ambiguity(flag) + # Also tie it off, in case prior had optional value or etc. Seems to be + # harmless for other kinds of flags. (TODO: this is a serious indicator + # that we need to move some of this flag-by-flag bookkeeping into the + # state machine bits, if possible - as-is it was REAL confusing re: why + # this was manually required!) + self.complete_flag() + # Set flag/arg obj + flag = self.context.inverse_flags[flag] if inverse else flag + # Update state + try: + self.flag = self.context.flags[flag] + except KeyError as e: + # Try fallback to initial/core flag + try: + self.flag = self.initial.flags[flag] + except KeyError: + # If it wasn't in either, raise the original context's + # exception, as that's more useful / correct. 
+ raise e + debug("Moving to flag {!r}".format(self.flag)) + # Bookkeeping for iterable-type flags (where the typical 'value + # non-empty/nondefault -> clearly it got its value already' test is + # insufficient) + self.flag_got_value = False + # Handle boolean flags (which can immediately be updated) + if self.flag and not self.flag.takes_value: + val = not inverse + debug("Marking seen flag {!r} as {}".format(self.flag, val)) + self.flag.value = val + + def see_value(self, value: Any) -> None: + self.check_ambiguity(value) + if self.flag and self.flag.takes_value: + debug("Setting flag {!r} to value {!r}".format(self.flag, value)) + self.flag.value = value + self.flag_got_value = True + else: + self.error("Flag {!r} doesn't take any value!".format(self.flag)) + + def see_positional_arg(self, value: Any) -> None: + for arg in self.context.positional_args: + if arg.value is None: + arg.value = value + break + + def error(self, msg: str) -> None: + raise ParseError(msg, self.context) diff --git a/.venv/lib/python3.9/site-packages/invoke/program.py b/.venv/lib/python3.9/site-packages/invoke/program.py new file mode 100644 index 0000000..c7e5cd0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/program.py @@ -0,0 +1,987 @@ +import getpass +import inspect +import json +import os +import sys +import textwrap +from importlib import import_module # buffalo buffalo +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Optional, + Sequence, + Tuple, + Type, +) + +from . import Collection, Config, Executor, FilesystemLoader +from .completion.complete import complete, print_completion_script +from .parser import Parser, ParserContext, Argument +from .exceptions import UnexpectedExit, CollectionNotFound, ParseError, Exit +from .terminals import pty_size +from .util import debug, enable_logging, helpline + +if TYPE_CHECKING: + from .loader import Loader + from .parser import ParseResult + from .util import Lexicon + + +class Program: + """ + Manages top-level CLI invocation, typically via ``setup.py`` entrypoints. + + Designed for distributing Invoke task collections as standalone programs, + but also used internally to implement the ``invoke`` program itself. + + .. seealso:: + :ref:`reusing-as-a-binary` for a tutorial/walkthrough of this + functionality. + + .. versionadded:: 1.0 + """ + + core: "ParseResult" + + def core_args(self) -> List["Argument"]: + """ + Return default core `.Argument` objects, as a list. + + .. 
versionadded:: 1.0 + """ + # Arguments present always, even when wrapped as a different binary + return [ + Argument( + names=("command-timeout", "T"), + kind=int, + help="Specify a global command execution timeout, in seconds.", + ), + Argument( + names=("complete",), + kind=bool, + default=False, + help="Print tab-completion candidates for given parse remainder.", # noqa + ), + Argument( + names=("config", "f"), + help="Runtime configuration file to use.", + ), + Argument( + names=("debug", "d"), + kind=bool, + default=False, + help="Enable debug output.", + ), + Argument( + names=("dry", "R"), + kind=bool, + default=False, + help="Echo commands instead of running.", + ), + Argument( + names=("echo", "e"), + kind=bool, + default=False, + help="Echo executed commands before running.", + ), + Argument( + names=("help", "h"), + optional=True, + help="Show core or per-task help and exit.", + ), + Argument( + names=("hide",), + help="Set default value of run()'s 'hide' kwarg.", + ), + Argument( + names=("list", "l"), + optional=True, + help="List available tasks, optionally limited to a namespace.", # noqa + ), + Argument( + names=("list-depth", "D"), + kind=int, + default=0, + help="When listing tasks, only show the first INT levels.", + ), + Argument( + names=("list-format", "F"), + help="Change the display format used when listing tasks. Should be one of: flat (default), nested, json.", # noqa + default="flat", + ), + Argument( + names=("print-completion-script",), + kind=str, + default="", + help="Print the tab-completion script for your preferred shell (bash|zsh|fish).", # noqa + ), + Argument( + names=("prompt-for-sudo-password",), + kind=bool, + default=False, + help="Prompt user at start of session for the sudo.password config value.", # noqa + ), + Argument( + names=("pty", "p"), + kind=bool, + default=False, + help="Use a pty when executing shell commands.", + ), + Argument( + names=("version", "V"), + kind=bool, + default=False, + help="Show version and exit.", + ), + Argument( + names=("warn-only", "w"), + kind=bool, + default=False, + help="Warn, instead of failing, when shell commands fail.", + ), + Argument( + names=("write-pyc",), + kind=bool, + default=False, + help="Enable creation of .pyc files.", + ), + ] + + def task_args(self) -> List["Argument"]: + """ + Return default task-related `.Argument` objects, as a list. + + These are only added to the core args in "task runner" mode (the + default for ``invoke`` itself) - they are omitted when the constructor + is given a non-empty ``namespace`` argument ("bundled namespace" mode). + + .. versionadded:: 1.0 + """ + # Arguments pertaining specifically to invocation as 'invoke' itself + # (or as other arbitrary-task-executing programs, like 'fab') + return [ + Argument( + names=("collection", "c"), + help="Specify collection name to load.", + ), + Argument( + names=("no-dedupe",), + kind=bool, + default=False, + help="Disable task deduplication.", + ), + Argument( + names=("search-root", "r"), + help="Change root directory used for finding task modules.", + ), + ] + + argv: List[str] + # Other class-level global variables a subclass might override sometime + # maybe? 
+ leading_indent_width = 2 + leading_indent = " " * leading_indent_width + indent_width = 4 + indent = " " * indent_width + col_padding = 3 + + def __init__( + self, + version: Optional[str] = None, + namespace: Optional["Collection"] = None, + name: Optional[str] = None, + binary: Optional[str] = None, + loader_class: Optional[Type["Loader"]] = None, + executor_class: Optional[Type["Executor"]] = None, + config_class: Optional[Type["Config"]] = None, + binary_names: Optional[List[str]] = None, + ) -> None: + """ + Create a new, parameterized `.Program` instance. + + :param str version: + The program's version, e.g. ``"0.1.0"``. Defaults to ``"unknown"``. + + :param namespace: + A `.Collection` to use as this program's subcommands. + + If ``None`` (the default), the program will behave like ``invoke``, + seeking a nearby task namespace with a `.Loader` and exposing + arguments such as :option:`--list` and :option:`--collection` for + inspecting or selecting specific namespaces. + + If given a `.Collection` object, will use it as if it had been + handed to :option:`--collection`. Will also update the parser to + remove references to tasks and task-related options, and display + the subcommands in ``--help`` output. The result will be a program + that has a static set of subcommands. + + :param str name: + The program's name, as displayed in ``--version`` output. + + If ``None`` (default), is a capitalized version of the first word + in the ``argv`` handed to `.run`. For example, when invoked from a + binstub installed as ``foobar``, it will default to ``Foobar``. + + :param str binary: + Descriptive lowercase binary name string used in help text. + + For example, Invoke's own internal value for this is ``inv[oke]``, + denoting that it is installed as both ``inv`` and ``invoke``. As + this is purely text intended for help display, it may be in any + format you wish, though it should match whatever you've put into + your ``setup.py``'s ``console_scripts`` entry. + + If ``None`` (default), uses the first word in ``argv`` verbatim (as + with ``name`` above, except not capitalized). + + :param binary_names: + List of binary name strings, for use in completion scripts. + + This list ensures that the shell completion scripts generated by + :option:`--print-completion-script` instruct the shell to use + that completion for all of this program's installed names. + + For example, Invoke's internal default for this is ``["inv", + "invoke"]``. + + If ``None`` (the default), the first word in ``argv`` (in the + invocation of :option:`--print-completion-script`) is used in a + single-item list. + + :param loader_class: + The `.Loader` subclass to use when loading task collections. + + Defaults to `.FilesystemLoader`. + + :param executor_class: + The `.Executor` subclass to use when executing tasks. + + Defaults to `.Executor`; may also be overridden at runtime by the + :ref:`configuration system ` and its + ``tasks.executor_class`` setting (anytime that setting is not + ``None``). + + :param config_class: + The `.Config` subclass to use for the base config object. + + Defaults to `.Config`. + + .. versionchanged:: 1.2 + Added the ``binary_names`` argument. + """ + self.version = "unknown" if version is None else version + self.namespace = namespace + self._name = name + # TODO 3.0: rename binary to binary_help_name or similar. (Or write + # code to autogenerate it from binary_names.) 
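For a concrete (if invented) picture of the "bundled namespace" mode these parameters describe, a tiny standalone program might look like:

from invoke import Collection, Program, task

@task
def release(c, dry_run=False):
    c.run("echo releasing (dry_run={})".format(dry_run))

program = Program(
    namespace=Collection(release),   # static subcommands; no tasks.py discovery
    name="MyTool",
    binary="mytool",
    binary_names=["mytool"],
    version="0.1.0",
)

if __name__ == "__main__":
    program.run()   # usually exposed instead via a console_scripts entry point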
+ self._binary = binary + self._binary_names = binary_names + self.argv = [] + self.loader_class = loader_class or FilesystemLoader + self.executor_class = executor_class or Executor + self.config_class = config_class or Config + + def create_config(self) -> None: + """ + Instantiate a `.Config` (or subclass, depending) for use in task exec. + + This Config is fully usable but will lack runtime-derived data like + project & runtime config files, CLI arg overrides, etc. That data is + added later in `update_config`. See `.Config` docstring for lifecycle + details. + + :returns: ``None``; sets ``self.config`` instead. + + .. versionadded:: 1.0 + """ + self.config = self.config_class() + + def update_config(self, merge: bool = True) -> None: + """ + Update the previously instantiated `.Config` with parsed data. + + For example, this is how ``--echo`` is able to override the default + config value for ``run.echo``. + + :param bool merge: + Whether to merge at the end, or defer. Primarily useful for + subclassers. Default: ``True``. + + .. versionadded:: 1.0 + """ + # Now that we have parse results handy, we can grab the remaining + # config bits: + # - runtime config, as it is dependent on the runtime flag/env var + # - the overrides config level, as it is composed of runtime flag data + # NOTE: only fill in values that would alter behavior, otherwise we + # want the defaults to come through. + run = {} + if self.args["warn-only"].value: + run["warn"] = True + if self.args.pty.value: + run["pty"] = True + if self.args.hide.value: + run["hide"] = self.args.hide.value + if self.args.echo.value: + run["echo"] = True + if self.args.dry.value: + run["dry"] = True + tasks = {} + if "no-dedupe" in self.args and self.args["no-dedupe"].value: + tasks["dedupe"] = False + timeouts = {} + command = self.args["command-timeout"].value + if command: + timeouts["command"] = command + # Handle "fill in config values at start of runtime", which for now is + # just sudo password + sudo = {} + if self.args["prompt-for-sudo-password"].value: + prompt = "Desired 'sudo.password' config value: " + sudo["password"] = getpass.getpass(prompt) + overrides = dict(run=run, tasks=tasks, sudo=sudo, timeouts=timeouts) + self.config.load_overrides(overrides, merge=False) + runtime_path = self.args.config.value + if runtime_path is None: + runtime_path = os.environ.get("INVOKE_RUNTIME_CONFIG", None) + self.config.set_runtime_path(runtime_path) + self.config.load_runtime(merge=False) + if merge: + self.config.merge() + + def run(self, argv: Optional[List[str]] = None, exit: bool = True) -> None: + """ + Execute main CLI logic, based on ``argv``. + + :param argv: + The arguments to execute against. May be ``None``, a list of + strings, or a string. See `.normalize_argv` for details. + + :param bool exit: + When ``False`` (default: ``True``), will ignore `.ParseError`, + `.Exit` and `.Failure` exceptions, which otherwise trigger calls to + `sys.exit`. + + .. note:: + This is mostly a concession to testing. If you're setting this + to ``False`` in a production setting, you should probably be + using `.Executor` and friends directly instead! + + .. versionadded:: 1.0 + """ + try: + # Create an initial config, which will hold defaults & values from + # most config file locations (all but runtime.) Used to inform + # loading & parsing behavior. 
+ self.create_config() + # Parse the given ARGV with our CLI parsing machinery, resulting in + # things like self.args (core args/flags), self.collection (the + # loaded namespace, which may be affected by the core flags) and + # self.tasks (the tasks requested for exec and their own + # args/flags) + self.parse_core(argv) + # Handle collection concerns including project config + self.parse_collection() + # Parse remainder of argv as task-related input + self.parse_tasks() + # End of parsing (typically bailout stuff like --list, --help) + self.parse_cleanup() + # Update the earlier Config with new values from the parse step - + # runtime config file contents and flag-derived overrides (e.g. for + # run()'s echo, warn, etc options.) + self.update_config() + # Create an Executor, passing in the data resulting from the prior + # steps, then tell it to execute the tasks. + self.execute() + except (UnexpectedExit, Exit, ParseError) as e: + debug("Received a possibly-skippable exception: {!r}".format(e)) + # Print error messages from parser, runner, etc if necessary; + # prevents messy traceback but still clues interactive user into + # problems. + if isinstance(e, ParseError): + print(e, file=sys.stderr) + if isinstance(e, Exit) and e.message: + print(e.message, file=sys.stderr) + if isinstance(e, UnexpectedExit) and e.result.hide: + print(e, file=sys.stderr, end="") + # Terminate execution unless we were told not to. + if exit: + if isinstance(e, UnexpectedExit): + code = e.result.exited + elif isinstance(e, Exit): + code = e.code + elif isinstance(e, ParseError): + code = 1 + sys.exit(code) + else: + debug("Invoked as run(..., exit=False), ignoring exception") + except KeyboardInterrupt: + sys.exit(1) # Same behavior as Python itself outside of REPL + + def parse_core(self, argv: Optional[List[str]]) -> None: + debug("argv given to Program.run: {!r}".format(argv)) + self.normalize_argv(argv) + + # Obtain core args (sets self.core) + self.parse_core_args() + debug("Finished parsing core args") + + # Set interpreter bytecode-writing flag + sys.dont_write_bytecode = not self.args["write-pyc"].value + + # Enable debugging from here on out, if debug flag was given. + # (Prior to this point, debugging requires setting INVOKE_DEBUG). + if self.args.debug.value: + enable_logging() + + # Short-circuit if --version + if self.args.version.value: + debug("Saw --version, printing version & exiting") + self.print_version() + raise Exit + + # Print (dynamic, no tasks required) completion script if requested + if self.args["print-completion-script"].value: + print_completion_script( + shell=self.args["print-completion-script"].value, + names=self.binary_names, + ) + raise Exit + + def parse_collection(self) -> None: + """ + Load a tasks collection & project-level config. + + .. versionadded:: 1.0 + """ + # Load a collection of tasks unless one was already set. + if self.namespace is not None: + debug( + "Program was given default namespace, not loading collection" + ) + self.collection = self.namespace + else: + debug( + "No default namespace provided, trying to load one from disk" + ) # noqa + # If no bundled namespace & --help was given, just print it and + # exit. (If we did have a bundled namespace, core --help will be + # handled *after* the collection is loaded & parsing is done.) + if self.args.help.value is True: + debug( + "No bundled namespace & bare --help given; printing help." 
+ ) + self.print_help() + raise Exit + self.load_collection() + # Set these up for potential use later when listing tasks + # TODO: be nice if these came from the config...! Users would love to + # say they default to nested for example. Easy 2.x feature-add. + self.list_root: Optional[str] = None + self.list_depth: Optional[int] = None + self.list_format = "flat" + self.scoped_collection = self.collection + + # TODO: load project conf, if possible, gracefully + + def parse_cleanup(self) -> None: + """ + Post-parsing, pre-execution steps such as --help, --list, etc. + + .. versionadded:: 1.0 + """ + halp = self.args.help.value + + # Core (no value given) --help output (only when bundled namespace) + if halp is True: + debug("Saw bare --help, printing help & exiting") + self.print_help() + raise Exit + + # Print per-task help, if necessary + if halp: + if halp in self.parser.contexts: + msg = "Saw --help , printing per-task help & exiting" + debug(msg) + self.print_task_help(halp) + raise Exit + else: + # TODO: feels real dumb to factor this out of Parser, but...we + # should? + raise ParseError("No idea what '{}' is!".format(halp)) + + # Print discovered tasks if necessary + list_root = self.args.list.value # will be True or string + self.list_format = self.args["list-format"].value + self.list_depth = self.args["list-depth"].value + if list_root: + # Not just --list, but --list some-root - do moar work + if isinstance(list_root, str): + self.list_root = list_root + try: + sub = self.collection.subcollection_from_path(list_root) + self.scoped_collection = sub + except KeyError: + msg = "Sub-collection '{}' not found!" + raise Exit(msg.format(list_root)) + self.list_tasks() + raise Exit + + # Print completion helpers if necessary + if self.args.complete.value: + complete( + names=self.binary_names, + core=self.core, + initial_context=self.initial_context, + collection=self.collection, + # NOTE: can't reuse self.parser as it has likely been mutated + # between when it was set and now. + parser=self._make_parser(), + ) + + # Fallback behavior if no tasks were given & no default specified + # (mostly a subroutine for overriding purposes) + # NOTE: when there is a default task, Executor will select it when no + # tasks were found in CLI parsing. + if not self.tasks and not self.collection.default: + self.no_tasks_given() + + def no_tasks_given(self) -> None: + debug( + "No tasks specified for execution and no default task; printing global help as fallback" # noqa + ) + self.print_help() + raise Exit + + def execute(self) -> None: + """ + Hand off data & tasks-to-execute specification to an `.Executor`. + + .. note:: + Client code just wanting a different `.Executor` subclass can just + set ``executor_class`` in `.__init__`, or override + ``tasks.executor_class`` anywhere in the :ref:`config system + ` (which may allow you to avoid using a custom + Program entirely). + + .. versionadded:: 1.0 + """ + klass = self.executor_class + config_path = self.config.tasks.executor_class + if config_path is not None: + # TODO: why the heck is this not builtin to importlib? + module_path, _, class_name = config_path.rpartition(".") + # TODO: worth trying to wrap both of these and raising ImportError + # for cases where module exists but class name does not? More + # "normal" but also its own possible source of bugs/confusion... 
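+ # -----------------------------------------------------------------------
+ # Editorial note (not part of upstream Invoke): the dotted-path handling
+ # below means a (hypothetical) config value such as
+ #     tasks.executor_class = "mypackage.executors.CustomExecutor"
+ # is resolved roughly as:
+ #     module_path, class_name = "mypackage.executors", "CustomExecutor"
+ #     klass = getattr(import_module(module_path), class_name)
+ # -----------------------------------------------------------------------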
+ module = import_module(module_path) + klass = getattr(module, class_name) + executor = klass(self.collection, self.config, self.core) + executor.execute(*self.tasks) + + def normalize_argv(self, argv: Optional[List[str]]) -> None: + """ + Massages ``argv`` into a useful list of strings. + + **If None** (the default), uses `sys.argv`. + + **If a non-string iterable**, uses that in place of `sys.argv`. + + **If a string**, performs a `str.split` and then executes with the + result. (This is mostly a convenience; when in doubt, use a list.) + + Sets ``self.argv`` to the result. + + .. versionadded:: 1.0 + """ + if argv is None: + argv = sys.argv + debug("argv was None; using sys.argv: {!r}".format(argv)) + elif isinstance(argv, str): + argv = argv.split() + debug("argv was string-like; splitting: {!r}".format(argv)) + self.argv = argv + + @property + def name(self) -> str: + """ + Derive program's human-readable name based on `.binary`. + + .. versionadded:: 1.0 + """ + return self._name or self.binary.capitalize() + + @property + def called_as(self) -> str: + """ + Returns the program name we were actually called as. + + Specifically, this is the (Python's os module's concept of a) basename + of the first argument in the parsed argument vector. + + .. versionadded:: 1.2 + """ + # XXX: defaults to empty string if 'argv' is '[]' or 'None' + return os.path.basename(self.argv[0]) if self.argv else "" + + @property + def binary(self) -> str: + """ + Derive program's help-oriented binary name(s) from init args & argv. + + .. versionadded:: 1.0 + """ + return self._binary or self.called_as + + @property + def binary_names(self) -> List[str]: + """ + Derive program's completion-oriented binary name(s) from args & argv. + + .. versionadded:: 1.2 + """ + return self._binary_names or [self.called_as] + + # TODO 3.0: ugh rename this or core_args, they are too confusing + @property + def args(self) -> "Lexicon": + """ + Obtain core program args from ``self.core`` parse result. + + .. versionadded:: 1.0 + """ + return self.core[0].args + + @property + def initial_context(self) -> ParserContext: + """ + The initial parser context, aka core program flags. + + The specific arguments contained therein will differ depending on + whether a bundled namespace was specified in `.__init__`. + + .. versionadded:: 1.0 + """ + args = self.core_args() + if self.namespace is None: + args += self.task_args() + return ParserContext(args=args) + + def print_version(self) -> None: + print("{} {}".format(self.name, self.version or "unknown")) + + def print_help(self) -> None: + usage_suffix = "task1 [--task1-opts] ... taskN [--taskN-opts]" + if self.namespace is not None: + usage_suffix = " [--subcommand-opts] ..." + print("Usage: {} [--core-opts] {}".format(self.binary, usage_suffix)) + print("") + print("Core options:") + print("") + self.print_columns(self.initial_context.help_tuples()) + if self.namespace is not None: + self.list_tasks() + + def parse_core_args(self) -> None: + """ + Filter out core args, leaving any tasks or their args for later. + + Sets ``self.core`` to the `.ParseResult` from this step. + + .. versionadded:: 1.0 + """ + debug("Parsing initial context (core args)") + parser = Parser(initial=self.initial_context, ignore_unknown=True) + self.core = parser.parse_argv(self.argv[1:]) + msg = "Core-args parse result: {!r} & unparsed: {!r}" + debug(msg.format(self.core, self.core.unparsed)) + + def load_collection(self) -> None: + """ + Load a task collection based on parsed core args, or die trying. 
+ + .. versionadded:: 1.0 + """ + # NOTE: start, coll_name both fall back to configuration values within + # Loader (which may, however, get them from our config.) + start = self.args["search-root"].value + loader = self.loader_class( # type: ignore + config=self.config, start=start + ) + coll_name = self.args.collection.value + try: + module, parent = loader.load(coll_name) + # This is the earliest we can load project config, so we should - + # allows project config to affect the task parsing step! + # TODO: is it worth merging these set- and load- methods? May + # require more tweaking of how things behave in/after __init__. + self.config.set_project_location(parent) + self.config.load_project() + self.collection = Collection.from_module( + module, + loaded_from=parent, + auto_dash_names=self.config.tasks.auto_dash_names, + ) + except CollectionNotFound as e: + raise Exit("Can't find any collection named {!r}!".format(e.name)) + + def _update_core_context( + self, context: ParserContext, new_args: Dict[str, Any] + ) -> None: + # Update core context w/ core_via_task args, if and only if the + # via-task version of the arg was truly given a value. + # TODO: push this into an Argument-aware Lexicon subclass and + # .update()? + for key, arg in new_args.items(): + if arg.got_value: + context.args[key]._value = arg._value + + def _make_parser(self) -> Parser: + return Parser( + initial=self.initial_context, + contexts=self.collection.to_contexts( + ignore_unknown_help=self.config.tasks.ignore_unknown_help + ), + ) + + def parse_tasks(self) -> None: + """ + Parse leftover args, which are typically tasks & per-task args. + + Sets ``self.parser`` to the parser used, ``self.tasks`` to the + parsed per-task contexts, and ``self.core_via_tasks`` to a context + holding any core flags seen within the task contexts. + + Also modifies ``self.core`` to include the data from ``core_via_tasks`` + (so that it correctly reflects any supplied core flags regardless of + where they appeared). + + .. versionadded:: 1.0 + """ + self.parser = self._make_parser() + debug("Parsing tasks against {!r}".format(self.collection)) + result = self.parser.parse_argv(self.core.unparsed) + self.core_via_tasks = result.pop(0) + self._update_core_context( + context=self.core[0], new_args=self.core_via_tasks.args + ) + self.tasks = result + debug("Resulting task contexts: {!r}".format(self.tasks)) + + def print_task_help(self, name: str) -> None: + """ + Print help for a specific task, e.g. ``inv --help <taskname>``. + + .. versionadded:: 1.0 + """ + # Setup + ctx = self.parser.contexts[name] + tuples = ctx.help_tuples() + docstring = inspect.getdoc(self.collection[name]) + header = "Usage: {} [--core-opts] {} {}[other tasks here ...]" + opts = "[--options] " if tuples else "" + print(header.format(self.binary, name, opts)) + print("") + print("Docstring:") + if docstring: + # Really wish textwrap worked better for this. + for line in docstring.splitlines(): + if line.strip(): + print(self.leading_indent + line) + else: + print("") + print("") + else: + print(self.leading_indent + "none") + print("") + print("Options:") + if tuples: + self.print_columns(tuples) + else: + print(self.leading_indent + "none") + print("") + + def list_tasks(self) -> None: + # Short circuit if no tasks to show (Collection now implements bool) + focus = self.scoped_collection + if not focus: + msg = "No tasks found in collection '{}'!" + raise Exit(msg.format(focus.name)) + # TODO: now that flat/nested are almost 100% unified, maybe rethink + # this a bit?
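+ # -----------------------------------------------------------------------
+ # Editorial note (not part of upstream Invoke): the getattr dispatch below
+ # maps the --list-format value onto the methods defined next, i.e.
+ # "flat" -> self.list_flat(), "nested" -> self.list_nested(),
+ # "json" -> self.list_json().
+ # -----------------------------------------------------------------------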
+ getattr(self, "list_{}".format(self.list_format))() + + def list_flat(self) -> None: + pairs = self._make_pairs(self.scoped_collection) + self.display_with_columns(pairs=pairs) + + def list_nested(self) -> None: + pairs = self._make_pairs(self.scoped_collection) + extra = "'*' denotes collection defaults" + self.display_with_columns(pairs=pairs, extra=extra) + + def _make_pairs( + self, + coll: "Collection", + ancestors: Optional[List[str]] = None, + ) -> List[Tuple[str, Optional[str]]]: + if ancestors is None: + ancestors = [] + pairs = [] + indent = len(ancestors) * self.indent + ancestor_path = ".".join(x for x in ancestors) + for name, task in sorted(coll.tasks.items()): + is_default = name == coll.default + # Start with just the name and just the aliases, no prefixes or + # dots. + displayname = name + aliases = list(map(coll.transform, sorted(task.aliases))) + # If displaying a sub-collection (or if we are displaying a given + # namespace/root), tack on some dots to make it clear these names + # require dotted paths to invoke. + if ancestors or self.list_root: + displayname = ".{}".format(displayname) + aliases = [".{}".format(x) for x in aliases] + # Nested? Indent, and add asterisks to default-tasks. + if self.list_format == "nested": + prefix = indent + if is_default: + displayname += "*" + # Flat? Prefix names and aliases with ancestor names to get full + # dotted path; and give default-tasks their collection name as the + # first alias. + if self.list_format == "flat": + prefix = ancestor_path + # Make sure leading dots are present for subcollections if + # scoped display + if prefix and self.list_root: + prefix = "." + prefix + aliases = [prefix + alias for alias in aliases] + if is_default and ancestors: + aliases.insert(0, prefix) + # Generate full name and help columns and add to pairs. + alias_str = " ({})".format(", ".join(aliases)) if aliases else "" + full = prefix + displayname + alias_str + pairs.append((full, helpline(task))) + # Determine whether we're at max-depth or not + truncate = self.list_depth and (len(ancestors) + 1) >= self.list_depth + for name, subcoll in sorted(coll.collections.items()): + displayname = name + if ancestors or self.list_root: + displayname = ".{}".format(displayname) + if truncate: + tallies = [ + "{} {}".format(len(getattr(subcoll, attr)), attr) + for attr in ("tasks", "collections") + if getattr(subcoll, attr) + ] + displayname += " [{}]".format(", ".join(tallies)) + if self.list_format == "nested": + pairs.append((indent + displayname, helpline(subcoll))) + elif self.list_format == "flat" and truncate: + # NOTE: only adding coll-oriented pair if limiting by depth + pairs.append((ancestor_path + displayname, helpline(subcoll))) + # Recurse, if not already at max depth + if not truncate: + recursed_pairs = self._make_pairs( + coll=subcoll, ancestors=ancestors + [name] + ) + pairs.extend(recursed_pairs) + return pairs + + def list_json(self) -> None: + # Sanity: we can't cleanly honor the --list-depth argument without + # changing the data schema or otherwise acting strangely; and it also + # doesn't make a ton of sense to limit depth when the output is for a + # script to handle. So we just refuse, for now. TODO: find better way + if self.list_depth: + raise Exit( + "The --list-depth option is not supported with JSON format!" + ) # noqa + # TODO: consider using something more formal re: the format this emits, + # eg json-schema or whatever. Would simplify the + # relatively-concise-but-only-human docs that currently describe this. 
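+ # -----------------------------------------------------------------------
+ # Editorial note (not part of upstream Invoke): a hedged CLI sketch of this
+ # code path; the exact JSON shape is whatever Collection.serialized() emits:
+ #
+ #     $ invoke --list --list-format json > tasks.json
+ # -----------------------------------------------------------------------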
+ coll = self.scoped_collection + data = coll.serialized() + print(json.dumps(data)) + + def task_list_opener(self, extra: str = "") -> str: + root = self.list_root + depth = self.list_depth + specifier = " '{}'".format(root) if root else "" + tail = "" + if depth or extra: + depthstr = "depth={}".format(depth) if depth else "" + joiner = "; " if (depth and extra) else "" + tail = " ({}{}{})".format(depthstr, joiner, extra) + text = "Available{} tasks{}".format(specifier, tail) + # TODO: do use cases w/ bundled namespace want to display things like + # root and depth too? Leaving off for now... + if self.namespace is not None: + text = "Subcommands" + return text + + def display_with_columns( + self, pairs: Sequence[Tuple[str, Optional[str]]], extra: str = "" + ) -> None: + root = self.list_root + print("{}:\n".format(self.task_list_opener(extra=extra))) + self.print_columns(pairs) + # TODO: worth stripping this out for nested? since it's signified with + # asterisk there? ugggh + default = self.scoped_collection.default + if default: + specific = "" + if root: + specific = " '{}'".format(root) + default = ".{}".format(default) + # TODO: trim/prefix dots + print("Default{} task: {}\n".format(specific, default)) + + def print_columns( + self, tuples: Sequence[Tuple[str, Optional[str]]] + ) -> None: + """ + Print tabbed columns from (name, help) ``tuples``. + + Useful for listing tasks + docstrings, flags + help strings, etc. + + .. versionadded:: 1.0 + """ + # Calculate column sizes: don't wrap flag specs, give what's left over + # to the descriptions. + name_width = max(len(x[0]) for x in tuples) + desc_width = ( + pty_size()[0] + - name_width + - self.leading_indent_width + - self.col_padding + - 1 + ) + wrapper = textwrap.TextWrapper(width=desc_width) + for name, help_str in tuples: + if help_str is None: + help_str = "" + # Wrap descriptions/help text + help_chunks = wrapper.wrap(help_str) + # Print flag spec + padding + name_padding = name_width - len(name) + spec = "".join( + ( + self.leading_indent, + name, + name_padding * " ", + self.col_padding * " ", + ) + ) + # Print help text as needed + if help_chunks: + print(spec + help_chunks[0]) + for chunk in help_chunks[1:]: + print((" " * len(spec)) + chunk) + else: + print(spec.rstrip()) + print("") diff --git a/.venv/lib/python3.9/site-packages/invoke/py.typed b/.venv/lib/python3.9/site-packages/invoke/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/invoke/runners.py b/.venv/lib/python3.9/site-packages/invoke/runners.py new file mode 100644 index 0000000..f1c888f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/runners.py @@ -0,0 +1,1675 @@ +import errno +import locale +import os +import struct +import sys +import threading +import time +import signal +from subprocess import Popen, PIPE +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Generator, + IO, + List, + Optional, + Tuple, + Type, +) + +# Import some platform-specific things at top level so they can be mocked for +# tests. 
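+ # -----------------------------------------------------------------------
+ # Editorial note (not part of upstream Invoke): keeping pty/fcntl/termios as
+ # module attributes lets tests swap them out, e.g. (hedged sketch):
+ #
+ #     from unittest import mock
+ #     with mock.patch("invoke.runners.pty", None):
+ #         ...  # exercise the "pty module unavailable" code path
+ # -----------------------------------------------------------------------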
+try: + import pty +except ImportError: + pty = None # type: ignore[assignment] +try: + import fcntl +except ImportError: + fcntl = None # type: ignore[assignment] +try: + import termios +except ImportError: + termios = None # type: ignore[assignment] + +from .exceptions import ( + UnexpectedExit, + Failure, + ThreadException, + WatcherError, + SubprocessPipeError, + CommandTimedOut, +) +from .terminals import ( + WINDOWS, + pty_size, + character_buffered, + ready_for_reading, + bytes_to_read, +) +from .util import has_fileno, isatty, ExceptionHandlingThread + +if TYPE_CHECKING: + from .context import Context + from .watchers import StreamWatcher + + +class Runner: + """ + Partially-abstract core command-running API. + + This class is not usable by itself and must be subclassed, implementing a + number of methods such as `start`, `wait` and `returncode`. For a subclass + implementation example, see the source code for `.Local`. + + .. versionadded:: 1.0 + """ + + opts: Dict[str, Any] + using_pty: bool + read_chunk_size = 1000 + input_sleep = 0.01 + + def __init__(self, context: "Context") -> None: + """ + Create a new runner with a handle on some `.Context`. + + :param context: + a `.Context` instance, used to transmit default options and provide + access to other contextualized information (e.g. a remote-oriented + `.Runner` might want a `.Context` subclass holding info about + hostnames and ports.) + + .. note:: + The `.Context` given to `.Runner` instances **must** contain + default config values for the `.Runner` class in question. At a + minimum, this means values for each of the default + `.Runner.run` keyword arguments such as ``echo`` and ``warn``. + + :raises exceptions.ValueError: + if not all expected default values are found in ``context``. + """ + #: The `.Context` given to the same-named argument of `__init__`. + self.context = context + #: A `threading.Event` signaling program completion. + #: + #: Typically set after `wait` returns. Some IO mechanisms rely on this + #: to know when to exit an infinite read loop. + self.program_finished = threading.Event() + # I wish Sphinx would organize all class/instance attrs in the same + # place. If I don't do this here, it goes 'class vars -> __init__ + # docstring -> instance vars' :( TODO: consider just merging class and + # __init__ docstrings, though that's annoying too. + #: How many bytes (at maximum) to read per iteration of stream reads. + self.read_chunk_size = self.__class__.read_chunk_size + # Ditto re: declaring this in 2 places for doc reasons. + #: How many seconds to sleep on each iteration of the stdin read loop + #: and other otherwise-fast loops. + self.input_sleep = self.__class__.input_sleep + #: Whether pty fallback warning has been emitted. + self.warned_about_pty_fallback = False + #: A list of `.StreamWatcher` instances for use by `respond`. Is filled + #: in at runtime by `run`. + self.watchers: List["StreamWatcher"] = [] + # Optional timeout timer placeholder + self._timer: Optional[threading.Timer] = None + # Async flags (initialized for 'finally' referencing in case something + # goes REAL bad during options parsing) + self._asynchronous = False + self._disowned = False + + def run(self, command: str, **kwargs: Any) -> Optional["Result"]: + """ + Execute ``command``, returning an instance of `Result` once complete. + + By default, this method is synchronous (it only returns once the + subprocess has completed), and allows interactive keyboard + communication with the subprocess. 
+ + It can instead behave asynchronously (returning early & requiring + interaction with the resulting object to manage subprocess lifecycle) + if you specify ``asynchronous=True``. Furthermore, you can completely + disassociate the subprocess from Invoke's control (allowing it to + persist on its own after Python exits) by saying ``disown=True``. See + the per-kwarg docs below for details on both of these. + + .. note:: + All kwargs will default to the values found in this instance's + `~.Runner.context` attribute, specifically in its configuration's + ``run`` subtree (e.g. ``run.echo`` provides the default value for + the ``echo`` keyword, etc). The base default values are described + in the parameter list below. + + :param str command: The shell command to execute. + + :param bool asynchronous: + When set to ``True`` (default ``False``), enables asynchronous + behavior, as follows: + + - Connections to the controlling terminal are disabled, meaning you + will not see the subprocess output and it will not respond to + your keyboard input - similar to ``hide=True`` and + ``in_stream=False`` (though explicitly given + ``(out|err|in)_stream`` file-like objects will still be honored + as normal). + - `.run` returns immediately after starting the subprocess, and its + return value becomes an instance of `Promise` instead of + `Result`. + - `Promise` objects are primarily useful for their `~Promise.join` + method, which blocks until the subprocess exits (similar to + threading APIs) and either returns a final `~Result` or raises an + exception, just as a synchronous ``run`` would. + + - As with threading and similar APIs, users of + ``asynchronous=True`` should make sure to ``join`` their + `Promise` objects to prevent issues with interpreter + shutdown. + - One easy way to handle such cleanup is to use the `Promise` + as a context manager - it will automatically ``join`` at the + exit of the context block. + + .. versionadded:: 1.4 + + :param bool disown: + When set to ``True`` (default ``False``), returns immediately like + ``asynchronous=True``, but does not perform any background work + related to that subprocess (it is completely ignored). This allows + subprocesses using shell backgrounding or similar techniques (e.g. + trailing ``&``, ``nohup``) to persist beyond the lifetime of the + Python process running Invoke. + + .. note:: + If you're unsure whether you want this or ``asynchronous``, you + probably want ``asynchronous``! + + Specifically, ``disown=True`` has the following behaviors: + + - The return value is ``None`` instead of a `Result` or subclass. + - No I/O worker threads are spun up, so you will have no access to + the subprocess' stdout/stderr, your stdin will not be forwarded, + ``(out|err|in)_stream`` will be ignored, and features like + ``watchers`` will not function. + - No exit code is checked for, so you will not receive any errors + if the subprocess fails to exit cleanly. + - ``pty=True`` may not function correctly (subprocesses may not run + at all; this seems to be a potential bug in Python's + ``pty.fork``) unless your command line includes tools such as + ``nohup`` or (the shell builtin) ``disown``. + + .. versionadded:: 1.4 + + :param bool dry: + Whether to dry-run instead of truly invoking the given command. See + :option:`--dry` (which flips this on globally) for details on this + behavior. + + .. versionadded:: 1.3 + + :param bool echo: + Controls whether `.run` prints the command string to local stdout + prior to executing it. Default: ``False``. + + .. 
note:: + ``hide=True`` will override ``echo=True`` if both are given. + + :param echo_format: + A string, which when passed to Python's inbuilt ``.format`` method, + will change the format of the output when ``run.echo`` is set to + true. + + Currently, only ``{command}`` is supported as a parameter. + + Defaults to printing the full command string in ANSI-escaped bold. + + :param bool echo_stdin: + Whether to write data from ``in_stream`` back to ``out_stream``. + + In other words, in normal interactive usage, this parameter + controls whether Invoke mirrors what you type back to your + terminal. + + By default (when ``None``), this behavior is triggered by the + following: + + * Not using a pty to run the subcommand (i.e. ``pty=False``), + as ptys natively echo stdin to stdout on their own; + * And when the controlling terminal of Invoke itself (as per + ``in_stream``) appears to be a valid terminal device or TTY. + (Specifically, when `~invoke.util.isatty` yields a ``True`` + result when given ``in_stream``.) + + .. note:: + This property tends to be ``False`` when piping another + program's output into an Invoke session, or when running + Invoke within another program (e.g. running Invoke from + itself). + + If both of those properties are true, echoing will occur; if either + is false, no echoing will be performed. + + When not ``None``, this parameter will override that auto-detection + and force, or disable, echoing. + + :param str encoding: + Override auto-detection of which encoding the subprocess is using + for its stdout/stderr streams (which defaults to the return value + of `default_encoding`). + + :param err_stream: + Same as ``out_stream``, except for standard error, and defaulting + to ``sys.stderr``. + + :param dict env: + By default, subprocesses receive a copy of Invoke's own environment + (i.e. ``os.environ``). Supply a dict here to update that child + environment. + + For example, ``run('command', env={'PYTHONPATH': + '/some/virtual/env/maybe'})`` would modify the ``PYTHONPATH`` env + var, with the rest of the child's env looking identical to the + parent. + + .. seealso:: ``replace_env`` for changing 'update' to 'replace'. + + :param bool fallback: + Controls auto-fallback behavior re: problems offering a pty when + ``pty=True``. Whether this has any effect depends on the specific + `Runner` subclass being invoked. Default: ``True``. + + :param hide: + Allows the caller to disable ``run``'s default behavior of copying + the subprocess' stdout and stderr to the controlling terminal. + Specify ``hide='out'`` (or ``'stdout'``) to hide only the stdout + stream, ``hide='err'`` (or ``'stderr'``) to hide only stderr, or + ``hide='both'`` (or ``True``) to hide both streams. + + The default value is ``None``, meaning to print everything; + ``False`` will also disable hiding. + + .. note:: + Stdout and stderr are always captured and stored in the + ``Result`` object, regardless of ``hide``'s value. + + .. note:: + ``hide=True`` will also override ``echo=True`` if both are + given (either as kwargs or via config/CLI). + + :param in_stream: + A file-like stream object to used as the subprocess' standard + input. If ``None`` (the default), ``sys.stdin`` will be used. + + If ``False``, will disable stdin mirroring entirely (though other + functionality which writes to the subprocess' stdin, such as + autoresponding, will still function.) 
Disabling stdin mirroring can + help when ``sys.stdin`` is a misbehaving non-stream object, such as + under test harnesses or headless command runners. + + :param out_stream: + A file-like stream object to which the subprocess' standard output + should be written. If ``None`` (the default), ``sys.stdout`` will + be used. + + :param bool pty: + By default, ``run`` connects directly to the invoked process and + reads its stdout/stderr streams. Some programs will buffer (or even + behave) differently in this situation compared to using an actual + terminal or pseudoterminal (pty). To use a pty instead of the + default behavior, specify ``pty=True``. + + .. warning:: + Due to their nature, ptys have a single output stream, so the + ability to tell stdout apart from stderr is **not possible** + when ``pty=True``. As such, all output will appear on + ``out_stream`` (see below) and be captured into the ``stdout`` + result attribute. ``err_stream`` and ``stderr`` will always be + empty when ``pty=True``. + + :param bool replace_env: + When ``True``, causes the subprocess to receive the dictionary + given to ``env`` as its entire shell environment, instead of + updating a copy of ``os.environ`` (which is the default behavior). + Default: ``False``. + + :param str shell: + Which shell binary to use. Default: ``/bin/bash`` (on Unix; + ``COMSPEC`` or ``cmd.exe`` on Windows.) + + :param timeout: + Cause the runner to submit an interrupt to the subprocess and raise + `.CommandTimedOut`, if the command takes longer than ``timeout`` + seconds to execute. Defaults to ``None``, meaning no timeout. + + .. versionadded:: 1.3 + + :param bool warn: + Whether to warn and continue, instead of raising + `.UnexpectedExit`, when the executed command exits with a + nonzero status. Default: ``False``. + + .. note:: + This setting has no effect on exceptions, which will still be + raised, typically bundled in `.ThreadException` objects if they + were raised by the IO worker threads. + + Similarly, `.WatcherError` exceptions raised by + `.StreamWatcher` instances will also ignore this setting, and + will usually be bundled inside `.Failure` objects (in order to + preserve the execution context). + + Ditto `.CommandTimedOut` - basically, anything that prevents a + command from actually getting to "exited with an exit code" + ignores this flag. + + :param watchers: + A list of `.StreamWatcher` instances which will be used to scan the + program's ``stdout`` or ``stderr`` and may write into its ``stdin`` + (typically ``bytes`` objects) in response to patterns or other + heuristics. + + See :doc:`/concepts/watchers` for details on this functionality. + + Default: ``[]``. + + :returns: + `Result`, or a subclass thereof. + + :raises: + `.UnexpectedExit`, if the command exited nonzero and + ``warn`` was ``False``. + + :raises: + `.Failure`, if the command didn't even exit cleanly, e.g. if a + `.StreamWatcher` raised `.WatcherError`. + + :raises: + `.ThreadException` (if the background I/O threads encountered + exceptions other than `.WatcherError`). + + .. versionadded:: 1.0 + """ + try: + return self._run_body(command, **kwargs) + finally: + if not (self._asynchronous or self._disowned): + self.stop() + + def echo(self, command: str) -> None: + print(self.opts["echo_format"].format(command=command)) + + def _setup(self, command: str, kwargs: Any) -> None: + """ + Prepare data on ``self`` so we're ready to start running. 
+ """ + # Normalize kwargs w/ config; sets self.opts, self.streams + self._unify_kwargs_with_config(kwargs) + # Environment setup + self.env = self.generate_env( + self.opts["env"], self.opts["replace_env"] + ) + # Arrive at final encoding if neither config nor kwargs had one + self.encoding = self.opts["encoding"] or self.default_encoding() + # Echo running command (wants to be early to be included in dry-run) + if self.opts["echo"]: + self.echo(command) + # Prepare common result args. + # TODO: I hate this. Needs a deeper separate think about tweaking + # Runner.generate_result in a way that isn't literally just this same + # two-step process, and which also works w/ downstream. + self.result_kwargs = dict( + command=command, + shell=self.opts["shell"], + env=self.env, + pty=self.using_pty, + hide=self.opts["hide"], + encoding=self.encoding, + ) + + def _run_body(self, command: str, **kwargs: Any) -> Optional["Result"]: + # Prepare all the bits n bobs. + self._setup(command, kwargs) + # If dry-run, stop here. + if self.opts["dry"]: + return self.generate_result( + **dict(self.result_kwargs, stdout="", stderr="", exited=0) + ) + # Start executing the actual command (runs in background) + self.start(command, self.opts["shell"], self.env) + # If disowned, we just stop here - no threads, no timer, no error + # checking, nada. + if self._disowned: + return None + # Stand up & kick off IO, timer threads + self.start_timer(self.opts["timeout"]) + self.threads, self.stdout, self.stderr = self.create_io_threads() + for thread in self.threads.values(): + thread.start() + # Wrap up or promise that we will, depending + return self.make_promise() if self._asynchronous else self._finish() + + def make_promise(self) -> "Promise": + """ + Return a `Promise` allowing async control of the rest of lifecycle. + + .. versionadded:: 1.4 + """ + return Promise(self) + + def _finish(self) -> "Result": + # Wait for subprocess to run, forwarding signals as we get them. + try: + while True: + try: + self.wait() + break # done waiting! + # Don't locally stop on ^C, only forward it: + # - if remote end really stops, we'll naturally stop after + # - if remote end does not stop (eg REPL, editor) we don't want + # to stop prematurely + except KeyboardInterrupt as e: + self.send_interrupt(e) + # TODO: honor other signals sent to our own process and + # transmit them to the subprocess before handling 'normally'. + # Make sure we tie off our worker threads, even if something exploded. + # Any exceptions that raised during self.wait() above will appear after + # this block. + finally: + # Inform stdin-mirroring worker to stop its eternal looping + self.program_finished.set() + # Join threads, storing inner exceptions, & set a timeout if + # necessary. (Segregate WatcherErrors as they are "anticipated + # errors" that want to show up at the end during creation of + # Failure objects.) + watcher_errors = [] + thread_exceptions = [] + for target, thread in self.threads.items(): + thread.join(self._thread_join_timeout(target)) + exception = thread.exception() + if exception is not None: + real = exception.value + if isinstance(real, WatcherError): + watcher_errors.append(real) + else: + thread_exceptions.append(exception) + # If any exceptions appeared inside the threads, raise them now as an + # aggregate exception object. + # NOTE: this is kept outside the 'finally' so that main-thread + # exceptions are raised before worker-thread exceptions; they're more + # likely to be Big Serious Problems. 
+ if thread_exceptions: + raise ThreadException(thread_exceptions) + # Collate stdout/err, calculate exited, and get final result obj + result = self._collate_result(watcher_errors) + # Any presence of WatcherError from the threads indicates a watcher was + # upset and aborted execution; make a generic Failure out of it and + # raise that. + if watcher_errors: + # TODO: ambiguity exists if we somehow get WatcherError in *both* + # threads...as unlikely as that would normally be. + raise Failure(result, reason=watcher_errors[0]) + # If a timeout was requested and the subprocess did time out, shout. + timeout = self.opts["timeout"] + if timeout is not None and self.timed_out: + raise CommandTimedOut(result, timeout=timeout) + if not (result or self.opts["warn"]): + raise UnexpectedExit(result) + return result + + def _unify_kwargs_with_config(self, kwargs: Any) -> None: + """ + Unify `run` kwargs with config options to arrive at local options. + + Sets: + + - ``self.opts`` - opts dict + - ``self.streams`` - map of stream names to stream target values + """ + opts = {} + for key, value in self.context.config.run.items(): + runtime = kwargs.pop(key, None) + opts[key] = value if runtime is None else runtime + # Pull in command execution timeout, which stores config elsewhere, + # but only use it if it's actually set (backwards compat) + config_timeout = self.context.config.timeouts.command + opts["timeout"] = kwargs.pop("timeout", config_timeout) + # Handle invalid kwarg keys (anything left in kwargs). + # Act like a normal function would, i.e. TypeError + if kwargs: + err = "run() got an unexpected keyword argument '{}'" + raise TypeError(err.format(list(kwargs.keys())[0])) + # Update disowned, async flags + self._asynchronous = opts["asynchronous"] + self._disowned = opts["disown"] + if self._asynchronous and self._disowned: + err = "Cannot give both 'asynchronous' and 'disown' at the same time!" # noqa + raise ValueError(err) + # If hide was True, turn off echoing + if opts["hide"] is True: + opts["echo"] = False + # Conversely, ensure echoing is always on when dry-running + if opts["dry"] is True: + opts["echo"] = True + # Always hide if async + if self._asynchronous: + opts["hide"] = True + # Then normalize 'hide' from one of the various valid input values, + # into a stream-names tuple. Also account for the streams. + out_stream, err_stream = opts["out_stream"], opts["err_stream"] + opts["hide"] = normalize_hide(opts["hide"], out_stream, err_stream) + # Derive stream objects + if out_stream is None: + out_stream = sys.stdout + if err_stream is None: + err_stream = sys.stderr + in_stream = opts["in_stream"] + if in_stream is None: + # If in_stream hasn't been overridden, and we're async, we don't + # want to read from sys.stdin (otherwise the default) - so set + # False instead. + in_stream = False if self._asynchronous else sys.stdin + # Determine pty or no + self.using_pty = self.should_use_pty(opts["pty"], opts["fallback"]) + if opts["watchers"]: + self.watchers = opts["watchers"] + # Set data + self.opts = opts + self.streams = {"out": out_stream, "err": err_stream, "in": in_stream} + + def _collate_result(self, watcher_errors: List[WatcherError]) -> "Result": + # At this point, we had enough success that we want to be returning or + # raising detailed info about our execution; so we generate a Result. + stdout = "".join(self.stdout) + stderr = "".join(self.stderr) + if WINDOWS: + # "Universal newlines" - replace all standard forms of + # newline with \n. 
This is not technically Windows related + # (\r as newline is an old Mac convention) but we only apply + # the translation for Windows as that's the only platform + # it is likely to matter for these days. + stdout = stdout.replace("\r\n", "\n").replace("\r", "\n") + stderr = stderr.replace("\r\n", "\n").replace("\r", "\n") + # Get return/exit code, unless there were WatcherErrors to handle. + # NOTE: In that case, returncode() may block waiting on the process + # (which may be waiting for user input). Since most WatcherError + # situations lack a useful exit code anyways, skipping this doesn't + # really hurt any. + exited = None if watcher_errors else self.returncode() + # TODO: as noted elsewhere, I kinda hate this. Consider changing + # generate_result()'s API in next major rev so we can tidy up. + result = self.generate_result( + **dict( + self.result_kwargs, stdout=stdout, stderr=stderr, exited=exited + ) + ) + return result + + def _thread_join_timeout(self, target: Callable) -> Optional[int]: + # Add a timeout to out/err thread joins when it looks like they're not + # dead but their counterpart is dead; this indicates issue #351 (fixed + # by #432) where the subproc may hang because its stdout (or stderr) is + # no longer being consumed by the dead thread (and a pipe is filling + # up.) In that case, the non-dead thread is likely to block forever on + # a `recv` unless we add this timeout. + if target == self.handle_stdin: + return None + opposite = self.handle_stderr + if target == self.handle_stderr: + opposite = self.handle_stdout + if opposite in self.threads and self.threads[opposite].is_dead: + return 1 + return None + + def create_io_threads( + self, + ) -> Tuple[Dict[Callable, ExceptionHandlingThread], List[str], List[str]]: + """ + Create and return a dictionary of IO thread worker objects. + + Caller is expected to handle persisting and/or starting the wrapped + threads. + """ + stdout: List[str] = [] + stderr: List[str] = [] + # Set up IO thread parameters (format - body_func: {kwargs}) + thread_args: Dict[Callable, Any] = { + self.handle_stdout: { + "buffer_": stdout, + "hide": "stdout" in self.opts["hide"], + "output": self.streams["out"], + } + } + # After opt processing above, in_stream will be a real stream obj or + # False, so we can truth-test it. We don't even create a stdin-handling + # thread if it's False, meaning user indicated stdin is nonexistent or + # problematic. + if self.streams["in"]: + thread_args[self.handle_stdin] = { + "input_": self.streams["in"], + "output": self.streams["out"], + "echo": self.opts["echo_stdin"], + } + if not self.using_pty: + thread_args[self.handle_stderr] = { + "buffer_": stderr, + "hide": "stderr" in self.opts["hide"], + "output": self.streams["err"], + } + # Kick off IO threads + threads = {} + for target, kwargs in thread_args.items(): + t = ExceptionHandlingThread(target=target, kwargs=kwargs) + threads[target] = t + return threads, stdout, stderr + + def generate_result(self, **kwargs: Any) -> "Result": + """ + Create & return a suitable `Result` instance from the given ``kwargs``. + + Subclasses may wish to override this in order to manipulate things or + generate a `Result` subclass (e.g. ones containing additional metadata + besides the default). + + .. versionadded:: 1.0 + """ + return Result(**kwargs) + + def read_proc_output(self, reader: Callable) -> Generator[str, None, None]: + """ + Iteratively read & decode bytes from a subprocess' out/err stream. 
+ + :param reader: + A literal reader function/partial, wrapping the actual stream + object in question, which takes a number of bytes to read, and + returns that many bytes (or ``None``). + + ``reader`` should be a reference to either `read_proc_stdout` or + `read_proc_stderr`, which perform the actual, platform/library + specific read calls. + + :returns: + A generator yielding strings. + + Specifically, each resulting string is the result of decoding + `read_chunk_size` bytes read from the subprocess' out/err stream. + + .. versionadded:: 1.0 + """ + # NOTE: Typically, reading from any stdout/err (local, remote or + # otherwise) can be thought of as "read until you get nothing back". + # This is preferable over "wait until an out-of-band signal claims the + # process is done running" because sometimes that signal will appear + # before we've actually read all the data in the stream (i.e.: a race + # condition). + while True: + data = reader(self.read_chunk_size) + if not data: + break + yield self.decode(data) + + def write_our_output(self, stream: IO, string: str) -> None: + """ + Write ``string`` to ``stream``. + + Also calls ``.flush()`` on ``stream`` to ensure that real terminal + streams don't buffer. + + :param stream: + A file-like stream object, mapping to the ``out_stream`` or + ``err_stream`` parameters of `run`. + + :param string: A Unicode string object. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + stream.write(string) + stream.flush() + + def _handle_output( + self, + buffer_: List[str], + hide: bool, + output: IO, + reader: Callable, + ) -> None: + # TODO: store un-decoded/raw bytes somewhere as well... + for data in self.read_proc_output(reader): + # Echo to local stdout if necessary + # TODO: should we rephrase this as "if you want to hide, give me a + # dummy output stream, e.g. something like /dev/null"? Otherwise, a + # combo of 'hide=stdout' + 'here is an explicit out_stream' means + # out_stream is never written to, and that seems...odd. + if not hide: + self.write_our_output(stream=output, string=data) + # Store in shared buffer so main thread can do things with the + # result after execution completes. + # NOTE: this is threadsafe insofar as no reading occurs until after + # the thread is join()'d. + buffer_.append(data) + # Run our specific buffer through the autoresponder framework + self.respond(buffer_) + + def handle_stdout( + self, buffer_: List[str], hide: bool, output: IO + ) -> None: + """ + Read process' stdout, storing into a buffer & printing/parsing. + + Intended for use as a thread target. Only terminates when all stdout + from the subprocess has been read. + + :param buffer_: The capture buffer shared with the main thread. + :param bool hide: Whether or not to replay data into ``output``. + :param output: + Output stream (file-like object) to write data into when not + hiding. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + self._handle_output( + buffer_, hide, output, reader=self.read_proc_stdout + ) + + def handle_stderr( + self, buffer_: List[str], hide: bool, output: IO + ) -> None: + """ + Read process' stderr, storing into a buffer & printing/parsing. + + Identical to `handle_stdout` except for the stream read from; see its + docstring for API details. + + .. versionadded:: 1.0 + """ + self._handle_output( + buffer_, hide, output, reader=self.read_proc_stderr + ) + + def read_our_stdin(self, input_: IO) -> Optional[str]: + """ + Read & decode bytes from a local stdin stream. 
+ + :param input_: + Actual stream object to read from. Maps to ``in_stream`` in `run`, + so will often be ``sys.stdin``, but might be any stream-like + object. + + :returns: + A Unicode string, the result of decoding the read bytes (this might + be the empty string if the pipe has closed/reached EOF); or + ``None`` if stdin wasn't ready for reading yet. + + .. versionadded:: 1.0 + """ + # TODO: consider moving the character_buffered contextmanager call in + # here? Downside is it would be flipping those switches for every byte + # read instead of once per session, which could be costly (?). + bytes_ = None + if ready_for_reading(input_): + try: + bytes_ = input_.read(bytes_to_read(input_)) + except OSError as e: + # Assume EBADF in this situation implies running under nohup or + # similar, where: + # - we cannot reliably detect a bad FD up front + # - trying to read it would explode + # - user almost surely doesn't care about stdin anyways + # and ignore it (but not other OSErrors!) + if e.errno != errno.EBADF: + raise + # Decode if it appears to be binary-type. (From real terminal + # streams, usually yes; from file-like objects, often no.) + if bytes_ and isinstance(bytes_, bytes): + # TODO: will decoding 1 byte at a time break multibyte + # character encodings? How to square interactivity with that? + bytes_ = self.decode(bytes_) + return bytes_ + + def handle_stdin( + self, + input_: IO, + output: IO, + echo: bool = False, + ) -> None: + """ + Read local stdin, copying into process' stdin as necessary. + + Intended for use as a thread target. + + .. note:: + Because real terminal stdin streams have no well-defined "end", if + such a stream is detected (based on existence of a callable + ``.fileno()``) this method will wait until `program_finished` is + set, before terminating. + + When the stream doesn't appear to be from a terminal, the same + semantics as `handle_stdout` are used - the stream is simply + ``read()`` from until it returns an empty value. + + :param input_: Stream (file-like object) from which to read. + :param output: Stream (file-like object) to which echoing may occur. + :param bool echo: User override option for stdin-stdout echoing. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + # TODO: reinstate lock/whatever thread logic from fab v1 which prevents + # reading from stdin while other parts of the code are prompting for + # runtime passwords? (search for 'input_enabled') + # TODO: fabric#1339 is strongly related to this, if it's not literally + # exposing some regression in Fabric 1.x itself. + closed_stdin = False + with character_buffered(input_): + while True: + data = self.read_our_stdin(input_) + if data: + # Mirror what we just read to process' stdin. + # We encode to ensure bytes, but skip the decode step since + # there's presumably no need (nobody's interacting with + # this data programmatically). + self.write_proc_stdin(data) + # Also echo it back to local stdout (or whatever + # out_stream is set to) when necessary. + if echo is None: + echo = self.should_echo_stdin(input_, output) + if echo: + self.write_our_output(stream=output, string=data) + # Empty string/char/byte != None. Can't just use 'else' here. + elif data is not None: + # When reading from file-like objects that aren't "real" + # terminal streams, an empty byte signals EOF. 
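+ # -----------------------------------------------------------------------
+ # Editorial note (not part of upstream Invoke): e.g. for piped input such as
+ # `echo y | invoke sometask` (hypothetical task name), read() eventually
+ # returns "", and the branch below closes the subprocess' stdin exactly
+ # once so the child also sees EOF.
+ # -----------------------------------------------------------------------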
+ if not self.using_pty and not closed_stdin: + self.close_proc_stdin() + closed_stdin = True + # Dual all-done signals: program being executed is done + # running, *and* we don't seem to be reading anything out of + # stdin. (NOTE: If we only test the former, we may encounter + # race conditions re: unread stdin.) + if self.program_finished.is_set() and not data: + break + # Take a nap so we're not chewing CPU. + time.sleep(self.input_sleep) + + def should_echo_stdin(self, input_: IO, output: IO) -> bool: + """ + Determine whether data read from ``input_`` should echo to ``output``. + + Used by `handle_stdin`; tests attributes of ``input_`` and ``output``. + + :param input_: Input stream (file-like object). + :param output: Output stream (file-like object). + :returns: A ``bool``. + + .. versionadded:: 1.0 + """ + return (not self.using_pty) and isatty(input_) + + def respond(self, buffer_: List[str]) -> None: + """ + Write to the program's stdin in response to patterns in ``buffer_``. + + The patterns and responses are driven by the `.StreamWatcher` instances + from the ``watchers`` kwarg of `run` - see :doc:`/concepts/watchers` + for a conceptual overview. + + :param buffer: + The capture buffer for this thread's particular IO stream. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + # Join buffer contents into a single string; without this, + # StreamWatcher subclasses can't do things like iteratively scan for + # pattern matches. + # NOTE: using string.join should be "efficient enough" for now, re: + # speed and memory use. Should that become false, consider using + # StringIO or cStringIO (tho the latter doesn't do Unicode well?) which + # is apparently even more efficient. + stream = "".join(buffer_) + for watcher in self.watchers: + for response in watcher.submit(stream): + self.write_proc_stdin(response) + + def generate_env( + self, env: Dict[str, Any], replace_env: bool + ) -> Dict[str, Any]: + """ + Return a suitable environment dict based on user input & behavior. + + :param dict env: Dict supplying overrides or full env, depending. + :param bool replace_env: + Whether ``env`` updates, or is used in place of, the value of + `os.environ`. + + :returns: A dictionary of shell environment vars. + + .. versionadded:: 1.0 + """ + return env if replace_env else dict(os.environ, **env) + + def should_use_pty(self, pty: bool, fallback: bool) -> bool: + """ + Should execution attempt to use a pseudo-terminal? + + :param bool pty: + Whether the user explicitly asked for a pty. + :param bool fallback: + Whether falling back to non-pty execution should be allowed, in + situations where ``pty=True`` but a pty could not be allocated. + + .. versionadded:: 1.0 + """ + # NOTE: fallback not used: no falling back implemented by default. + return pty + + @property + def has_dead_threads(self) -> bool: + """ + Detect whether any IO threads appear to have terminated unexpectedly. + + Used during process-completion waiting (in `wait`) to ensure we don't + deadlock our child process if our IO processing threads have + errored/died. + + :returns: + ``True`` if any threads appear to have terminated with an + exception, ``False`` otherwise. + + .. versionadded:: 1.0 + """ + return any(x.is_dead for x in self.threads.values()) + + def wait(self) -> None: + """ + Block until the running command appears to have exited. + + :returns: ``None``. + + .. 
versionadded:: 1.0 + """ + while True: + proc_finished = self.process_is_finished + dead_threads = self.has_dead_threads + if proc_finished or dead_threads: + break + time.sleep(self.input_sleep) + + def write_proc_stdin(self, data: str) -> None: + """ + Write encoded ``data`` to the running process' stdin. + + :param data: A Unicode string. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + # Encode always, then request implementing subclass to perform the + # actual write to subprocess' stdin. + self._write_proc_stdin(data.encode(self.encoding)) + + def decode(self, data: bytes) -> str: + """ + Decode some ``data`` bytes, returning Unicode. + + .. versionadded:: 1.0 + """ + # NOTE: yes, this is a 1-liner. The point is to make it much harder to + # forget to use 'replace' when decoding :) + return data.decode(self.encoding, "replace") + + @property + def process_is_finished(self) -> bool: + """ + Determine whether our subprocess has terminated. + + .. note:: + The implementation of this method should be nonblocking, as it is + used within a query/poll loop. + + :returns: + ``True`` if the subprocess has finished running, ``False`` + otherwise. + + .. versionadded:: 1.0 + """ + raise NotImplementedError + + def start(self, command: str, shell: str, env: Dict[str, Any]) -> None: + """ + Initiate execution of ``command`` (via ``shell``, with ``env``). + + Typically this means use of a forked subprocess or requesting start of + execution on a remote system. + + In most cases, this method will also set subclass-specific member + variables used in other methods such as `wait` and/or `returncode`. + + :param str command: + Command string to execute. + + :param str shell: + Shell to use when executing ``command``. + + :param dict env: + Environment dict used to prep shell environment. + + .. versionadded:: 1.0 + """ + raise NotImplementedError + + def start_timer(self, timeout: int) -> None: + """ + Start a timer to `kill` our subprocess after ``timeout`` seconds. + """ + if timeout is not None: + self._timer = threading.Timer(timeout, self.kill) + self._timer.start() + + def read_proc_stdout(self, num_bytes: int) -> Optional[bytes]: + """ + Read ``num_bytes`` from the running process' stdout stream. + + :param int num_bytes: Number of bytes to read at maximum. + + :returns: A string/bytes object. + + .. versionadded:: 1.0 + """ + raise NotImplementedError + + def read_proc_stderr(self, num_bytes: int) -> Optional[bytes]: + """ + Read ``num_bytes`` from the running process' stderr stream. + + :param int num_bytes: Number of bytes to read at maximum. + + :returns: A string/bytes object. + + .. versionadded:: 1.0 + """ + raise NotImplementedError + + def _write_proc_stdin(self, data: bytes) -> None: + """ + Write ``data`` to running process' stdin. + + This should never be called directly; it's for subclasses to implement. + See `write_proc_stdin` for the public API call. + + :param data: Already-encoded byte data suitable for writing. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + raise NotImplementedError + + def close_proc_stdin(self) -> None: + """ + Close running process' stdin. + + :returns: ``None``. + + .. versionadded:: 1.3 + """ + raise NotImplementedError + + def default_encoding(self) -> str: + """ + Return a string naming the expected encoding of subprocess streams. + + This return value should be suitable for use by encode/decode methods. + + .. versionadded:: 1.0 + """ + # TODO: probably wants to be 2 methods, one for local and one for + # subprocess. 
For now, good enough to assume both are the same. + return default_encoding() + + def send_interrupt(self, interrupt: "KeyboardInterrupt") -> None: + """ + Submit an interrupt signal to the running subprocess. + + In almost all implementations, the default behavior is what will be + desired: submit ``\x03`` to the subprocess' stdin pipe. However, we + leave this as a public method in case this default needs to be + augmented or replaced. + + :param interrupt: + The locally-sourced ``KeyboardInterrupt`` causing the method call. + + :returns: ``None``. + + .. versionadded:: 1.0 + """ + self.write_proc_stdin("\x03") + + def returncode(self) -> Optional[int]: + """ + Return the numeric return/exit code resulting from command execution. + + :returns: + `int`, if any reasonable return code could be determined, or + ``None`` in corner cases where that was not possible. + + .. versionadded:: 1.0 + """ + raise NotImplementedError + + def stop(self) -> None: + """ + Perform final cleanup, if necessary. + + This method is called within a ``finally`` clause inside the main `run` + method. Depending on the subclass, it may be a no-op, or it may do + things such as close network connections or open files. + + :returns: ``None`` + + .. versionadded:: 1.0 + """ + if self._timer: + self._timer.cancel() + + def kill(self) -> None: + """ + Forcibly terminate the subprocess. + + Typically only used by the timeout functionality. + + This is often a "best-effort" attempt, e.g. remote subprocesses often + must settle for simply shutting down the local side of the network + connection and hoping the remote end eventually gets the message. + """ + raise NotImplementedError + + @property + def timed_out(self) -> bool: + """ + Returns ``True`` if the subprocess stopped because it timed out. + + .. versionadded:: 1.3 + """ + # Timer expiry implies we did time out. (The timer itself will have + # killed the subprocess, allowing us to even get to this point.) + return bool(self._timer and not self._timer.is_alive()) + + +class Local(Runner): + """ + Execute a command on the local system in a subprocess. + + .. note:: + When Invoke itself is executed without a controlling terminal (e.g. + when ``sys.stdin`` lacks a useful ``fileno``), it's not possible to + present a handle on our PTY to local subprocesses. In such situations, + `Local` will fallback to behaving as if ``pty=False`` (on the theory + that degraded execution is better than none at all) as well as printing + a warning to stderr. + + To disable this behavior, say ``fallback=False``. + + .. versionadded:: 1.0 + """ + + def __init__(self, context: "Context") -> None: + super().__init__(context) + # Bookkeeping var for pty use case + self.status = 0 + + def should_use_pty(self, pty: bool = False, fallback: bool = True) -> bool: + use_pty = False + if pty: + use_pty = True + # TODO: pass in & test in_stream, not sys.stdin + if not has_fileno(sys.stdin) and fallback: + if not self.warned_about_pty_fallback: + err = "WARNING: stdin has no fileno; falling back to non-pty execution!\n" # noqa + sys.stderr.write(err) + self.warned_about_pty_fallback = True + use_pty = False + return use_pty + + def read_proc_stdout(self, num_bytes: int) -> Optional[bytes]: + # Obtain useful read-some-bytes function + if self.using_pty: + # Need to handle spurious OSErrors on some Linux platforms. 
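
The pty-fallback behavior documented on `Local` just above can be seen from user code; a rough sketch using the module-level ``invoke.run`` helper (command string illustrative)::

    import invoke

    # Request a pseudo-terminal; if stdin lacks a usable fileno (e.g. under a
    # test harness), Local falls back to pty=False and prints a one-time
    # warning to stderr instead of failing outright.
    result = invoke.run("ls --color=auto", pty=True, hide=True)
    print(result.stdout)
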
+ try: + data = os.read(self.parent_fd, num_bytes) + except OSError as e: + # Only eat I/O specific OSErrors so we don't hide others + stringified = str(e) + io_errors = ( + # The typical default + "Input/output error", + # Some less common platforms phrase it this way + "I/O error", + ) + if not any(error in stringified for error in io_errors): + raise + # The bad OSErrors happen after all expected output has + # appeared, so we return a falsey value, which triggers the + # "end of output" logic in code using reader functions. + data = None + elif self.process and self.process.stdout: + data = os.read(self.process.stdout.fileno(), num_bytes) + else: + data = None + return data + + def read_proc_stderr(self, num_bytes: int) -> Optional[bytes]: + # NOTE: when using a pty, this will never be called. + # TODO: do we ever get those OSErrors on stderr? Feels like we could? + if self.process and self.process.stderr: + return os.read(self.process.stderr.fileno(), num_bytes) + return None + + def _write_proc_stdin(self, data: bytes) -> None: + # NOTE: parent_fd from os.fork() is a read/write pipe attached to our + # forked process' stdout/stdin, respectively. + if self.using_pty: + fd = self.parent_fd + elif self.process and self.process.stdin: + fd = self.process.stdin.fileno() + else: + raise SubprocessPipeError( + "Unable to write to missing subprocess or stdin!" + ) + # Try to write, ignoring broken pipes if encountered (implies child + # process exited before the process piping stdin to us finished; + # there's nothing we can do about that!) + try: + os.write(fd, data) + except OSError as e: + if "Broken pipe" not in str(e): + raise + + def close_proc_stdin(self) -> None: + if self.using_pty: + # there is no working scenario to tell the process that stdin + # closed when using pty + raise SubprocessPipeError("Cannot close stdin when pty=True") + elif self.process and self.process.stdin: + self.process.stdin.close() + else: + raise SubprocessPipeError( + "Unable to close missing subprocess or stdin!" + ) + + def start(self, command: str, shell: str, env: Dict[str, Any]) -> None: + if self.using_pty: + if pty is None: # Encountered ImportError + err = "You indicated pty=True, but your platform doesn't support the 'pty' module!" # noqa + sys.exit(err) + cols, rows = pty_size() + self.pid, self.parent_fd = pty.fork() + # If we're the child process, load up the actual command in a + # shell, just as subprocess does; this replaces our process - whose + # pipes are all hooked up to the PTY - with the "real" one. + if self.pid == 0: + # TODO: both pty.spawn() and pexpect.spawn() do a lot of + # setup/teardown involving tty.setraw, getrlimit, signal. + # Ostensibly we'll want some of that eventually, but if + # possible write tests - integration-level if necessary - + # before adding it! + # + # Set pty window size based on what our own controlling + # terminal's window size appears to be. + # TODO: make subroutine? + winsize = struct.pack("HHHH", rows, cols, 0, 0) + fcntl.ioctl(sys.stdout.fileno(), termios.TIOCSWINSZ, winsize) + # Use execve for bare-minimum "exec w/ variable # args + env" + # behavior. No need for the 'p' (use PATH to find executable) + # for now. + # NOTE: stdlib subprocess (actually its posix flavor, which is + # written in C) uses either execve or execv, depending. 
+ os.execve(shell, [shell, "-c", command], env) + else: + self.process = Popen( + command, + shell=True, + executable=shell, + env=env, + stdout=PIPE, + stderr=PIPE, + stdin=PIPE, + ) + + def kill(self) -> None: + pid = self.pid if self.using_pty else self.process.pid + try: + os.kill(pid, signal.SIGKILL) + except ProcessLookupError: + # In odd situations where our subprocess is already dead, don't + # throw this upwards. + pass + + @property + def process_is_finished(self) -> bool: + if self.using_pty: + # NOTE: + # https://github.com/pexpect/ptyprocess/blob/4058faa05e2940662ab6da1330aa0586c6f9cd9c/ptyprocess/ptyprocess.py#L680-L687 + # implies that Linux "requires" use of the blocking, non-WNOHANG + # version of this call. Our testing doesn't verify this, however, + # so... + # NOTE: It does appear to be totally blocking on Windows, so our + # issue #351 may be totally unsolvable there. Unclear. + pid_val, self.status = os.waitpid(self.pid, os.WNOHANG) + return pid_val != 0 + else: + return self.process.poll() is not None + + def returncode(self) -> Optional[int]: + if self.using_pty: + # No subprocess.returncode available; use WIFEXITED/WIFSIGNALED to + # determine whch of WEXITSTATUS / WTERMSIG to use. + # TODO: is it safe to just say "call all WEXITSTATUS/WTERMSIG and + # return whichever one of them is nondefault"? Probably not? + # NOTE: doing this in an arbitrary order should be safe since only + # one of the WIF* methods ought to ever return True. + code = None + if os.WIFEXITED(self.status): + code = os.WEXITSTATUS(self.status) + elif os.WIFSIGNALED(self.status): + code = os.WTERMSIG(self.status) + # Match subprocess.returncode by turning signals into negative + # 'exit code' integers. + code = -1 * code + return code + # TODO: do we care about WIFSTOPPED? Maybe someday? + else: + return self.process.returncode + + def stop(self) -> None: + super().stop() + # If we opened a PTY for child communications, make sure to close() it, + # otherwise long-running Invoke-using processes exhaust their file + # descriptors eventually. + if self.using_pty: + try: + os.close(self.parent_fd) + except Exception: + # If something weird happened preventing the close, there's + # nothing to be done about it now... + pass + + +class Result: + """ + A container for information about the result of a command execution. + + All params are exposed as attributes of the same name and type. + + :param str stdout: + The subprocess' standard output. + + :param str stderr: + Same as ``stdout`` but containing standard error (unless the process + was invoked via a pty, in which case it will be empty; see + `.Runner.run`.) + + :param str encoding: + The string encoding used by the local shell environment. + + :param str command: + The command which was executed. + + :param str shell: + The shell binary used for execution. + + :param dict env: + The shell environment used for execution. (Default is the empty dict, + ``{}``, not ``None`` as displayed in the signature.) + + :param int exited: + An integer representing the subprocess' exit/return code. + + .. note:: + This may be ``None`` in situations where the subprocess did not run + to completion, such as when auto-responding failed or a timeout was + reached. + + :param bool pty: + A boolean describing whether the subprocess was invoked with a pty or + not; see `.Runner.run`. 
+ + :param tuple hide: + A tuple of stream names (none, one or both of ``('stdout', 'stderr')``) + which were hidden from the user when the generating command executed; + this is a normalized value derived from the ``hide`` parameter of + `.Runner.run`. + + For example, ``run('command', hide='stdout')`` will yield a `Result` + where ``result.hide == ('stdout',)``; ``hide=True`` or ``hide='both'`` + results in ``result.hide == ('stdout', 'stderr')``; and ``hide=False`` + (the default) generates ``result.hide == ()`` (the empty tuple.) + + .. note:: + `Result` objects' truth evaluation is equivalent to their `.ok` + attribute's value. Therefore, quick-and-dirty expressions like the + following are possible:: + + if run("some shell command"): + do_something() + else: + handle_problem() + + However, remember `Zen of Python #2 + `_. + + .. versionadded:: 1.0 + """ + + # TODO: inherit from namedtuple instead? heh (or: use attrs from pypi) + def __init__( + self, + stdout: str = "", + stderr: str = "", + encoding: Optional[str] = None, + command: str = "", + shell: str = "", + env: Optional[Dict[str, Any]] = None, + exited: int = 0, + pty: bool = False, + hide: Tuple[str, ...] = tuple(), + ): + self.stdout = stdout + self.stderr = stderr + if encoding is None: + encoding = default_encoding() + self.encoding = encoding + self.command = command + self.shell = shell + self.env = {} if env is None else env + self.exited = exited + self.pty = pty + self.hide = hide + + @property + def return_code(self) -> int: + """ + An alias for ``.exited``. + + .. versionadded:: 1.0 + """ + return self.exited + + def __bool__(self) -> bool: + return self.ok + + def __str__(self) -> str: + if self.exited is not None: + desc = "Command exited with status {}.".format(self.exited) + else: + desc = "Command was not fully executed due to watcher error." + ret = [desc] + for x in ("stdout", "stderr"): + val = getattr(self, x) + ret.append( + """=== {} === +{} +""".format( + x, val.rstrip() + ) + if val + else "(no {})".format(x) + ) + return "\n".join(ret) + + def __repr__(self) -> str: + # TODO: more? e.g. len of stdout/err? (how to represent cleanly in a + # 'x=y' format like this? e.g. '4b' is ambiguous as to what it + # represents + template = "<Result cmd={!r} exited={}>" + return template.format(self.command, self.exited) + + @property + def ok(self) -> bool: + """ + A boolean equivalent to ``exited == 0``. + + .. versionadded:: 1.0 + """ + return bool(self.exited == 0) + + @property + def failed(self) -> bool: + """ + The inverse of ``ok``. + + I.e., ``True`` if the program exited with a nonzero return code, and + ``False`` otherwise. + + .. versionadded:: 1.0 + """ + return not self.ok + + def tail(self, stream: str, count: int = 10) -> str: + """ + Return the last ``count`` lines of ``stream``, plus leading whitespace. + + :param str stream: + Name of some captured stream attribute, eg ``"stdout"``. + :param int count: + Number of lines to preserve. + + .. versionadded:: 1.3 + """ + # TODO: preserve alternate line endings? Mehhhh + # NOTE: no trailing \n preservation; easier for below display if + # normalized + return "\n\n" + "\n".join(getattr(self, stream).splitlines()[-count:]) + + +class Promise(Result): + """ + A promise of some future `Result`, yielded from asynchronous execution. + + This class' primary API member is `join`; instances may also be used as + context managers, which will automatically call `join` when the block + exits. In such cases, the context manager yields ``self``. 
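
As the `Result` docstring above notes, a result is truthy exactly when ``.ok`` is true. Combined with `run`'s ``warn=True`` flag (which keeps nonzero exits from raising), that allows simple flow control; a small sketch using the module-level ``invoke.run`` helper, with an illustrative command and file name::

    import invoke

    # warn=True keeps a nonzero exit from raising, so we can branch on the
    # Result's truthiness (equivalent to result.ok, i.e. exited == 0).
    result = invoke.run("grep -q TODO README.md", warn=True, hide=True)
    if result:
        print("found TODOs")
    else:
        print("clean; exit code was", result.exited)
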
+ + `Promise` also exposes copies of many `Result` attributes, specifically + those that derive from `~Runner.run` kwargs and not the result of command + execution. For example, ``command`` is replicated here, but ``stdout`` is + not. + + .. versionadded:: 1.4 + """ + + def __init__(self, runner: "Runner") -> None: + """ + Create a new promise. + + :param runner: + An in-flight `Runner` instance making this promise. + + Must already have started the subprocess and spun up IO threads. + """ + self.runner = runner + # Basically just want exactly this (recently refactored) kwargs dict. + # TODO: consider proxying vs copying, but prob wait for refactor + for key, value in self.runner.result_kwargs.items(): + setattr(self, key, value) + + def join(self) -> Result: + """ + Block until associated subprocess exits, returning/raising the result. + + This acts identically to the end of a synchronously executed ``run``, + namely that: + + - various background threads (such as IO workers) are themselves + joined; + - if the subprocess exited normally, a `Result` is returned; + - in any other case (unforeseen exceptions, IO sub-thread + `.ThreadException`, `.Failure`, `.WatcherError`) the relevant + exception is raised here. + + See `~Runner.run` docs, or those of the relevant classes, for further + details. + """ + try: + return self.runner._finish() + finally: + self.runner.stop() + + def __enter__(self) -> "Promise": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: BaseException, + exc_tb: Optional[TracebackType], + ) -> None: + self.join() + + +def normalize_hide( + val: Any, + out_stream: Optional[str] = None, + err_stream: Optional[str] = None, +) -> Tuple[str, ...]: + # Normalize to list-of-stream-names + hide_vals = (None, False, "out", "stdout", "err", "stderr", "both", True) + if val not in hide_vals: + err = "'hide' got {!r} which is not in {!r}" + raise ValueError(err.format(val, hide_vals)) + if val in (None, False): + hide = [] + elif val in ("both", True): + hide = ["stdout", "stderr"] + elif val == "out": + hide = ["stdout"] + elif val == "err": + hide = ["stderr"] + else: + hide = [val] + # Revert any streams that have been overridden from the default value + if out_stream is not None and "stdout" in hide: + hide.remove("stdout") + if err_stream is not None and "stderr" in hide: + hide.remove("stderr") + return tuple(hide) + + +def default_encoding() -> str: + """ + Obtain apparent interpreter-local default text encoding. + + Often used as a baseline in situations where we must use SOME encoding for + unknown-but-presumably-text bytes, and the user has not specified an + override. + """ + encoding = locale.getpreferredencoding(False) + return encoding diff --git a/.venv/lib/python3.9/site-packages/invoke/tasks.py b/.venv/lib/python3.9/site-packages/invoke/tasks.py new file mode 100644 index 0000000..cd3075e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/tasks.py @@ -0,0 +1,519 @@ +""" +This module contains the core `.Task` class & convenience decorators used to +generate new tasks. 
+""" + +import inspect +import types +from copy import deepcopy +from functools import update_wrapper +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + List, + Generic, + Iterable, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from .context import Context +from .parser import Argument, translate_underscores + +if TYPE_CHECKING: + from inspect import Signature + from .config import Config + +T = TypeVar("T", bound=Callable) + + +class Task(Generic[T]): + """ + Core object representing an executable task & its argument specification. + + For the most part, this object is a clearinghouse for all of the data that + may be supplied to the `@task ` decorator, such as + ``name``, ``aliases``, ``positional`` etc, which appear as attributes. + + In addition, instantiation copies some introspection/documentation friendly + metadata off of the supplied ``body`` object, such as ``__doc__``, + ``__name__`` and ``__module__``, allowing it to "appear as" ``body`` for + most intents and purposes. + + .. versionadded:: 1.0 + """ + + # TODO: store these kwarg defaults central, refer to those values both here + # and in @task. + # TODO: allow central per-session / per-taskmodule control over some of + # them, e.g. (auto_)positional, auto_shortflags. + # NOTE: we shadow __builtins__.help here on purpose - obfuscating to avoid + # it feels bad, given the builtin will never actually be in play anywhere + # except a debug shell whose frame is exactly inside this class. + def __init__( + self, + body: Callable, + name: Optional[str] = None, + aliases: Iterable[str] = (), + positional: Optional[Iterable[str]] = None, + optional: Iterable[str] = (), + default: bool = False, + auto_shortflags: bool = True, + help: Optional[Dict[str, Any]] = None, + pre: Optional[Union[List[str], str]] = None, + post: Optional[Union[List[str], str]] = None, + autoprint: bool = False, + iterable: Optional[Iterable[str]] = None, + incrementable: Optional[Iterable[str]] = None, + ) -> None: + # Real callable + self.body = body + update_wrapper(self, self.body) + # Copy a bunch of special properties from the body for the benefit of + # Sphinx autodoc or other introspectors. + self.__doc__ = getattr(body, "__doc__", "") + self.__name__ = getattr(body, "__name__", "") + self.__module__ = getattr(body, "__module__", "") + # Default name, alternate names, and whether it should act as the + # default for its parent collection + self._name = name + self.aliases = aliases + self.is_default = default + # Arg/flag/parser hints + self.positional = self.fill_implicit_positionals(positional) + self.optional = tuple(optional) + self.iterable = iterable or [] + self.incrementable = incrementable or [] + self.auto_shortflags = auto_shortflags + self.help = (help or {}).copy() + # Call chain bidness + self.pre = pre or [] + self.post = post or [] + self.times_called = 0 + # Whether to print return value post-execution + self.autoprint = autoprint + + @property + def name(self) -> str: + return self._name or self.__name__ + + def __repr__(self) -> str: + aliases = "" + if self.aliases: + aliases = " ({})".format(", ".join(self.aliases)) + return "".format(self.name, aliases) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Task) or self.name != other.name: + return False + # Functions do not define __eq__ but func_code objects apparently do. + # (If we're wrapping some other callable, they will be responsible for + # defining equality on their end.) 
+ if self.body == other.body: + return True + else: + try: + return self.body.__code__ == other.body.__code__ + except AttributeError: + return False + + def __hash__(self) -> int: + # Presumes name and body will never be changed. Hrm. + # Potentially cleaner to just not use Tasks as hash keys, but let's do + # this for now. + return hash(self.name) + hash(self.body) + + def __call__(self, *args: Any, **kwargs: Any) -> T: + # Guard against calling tasks with no context. + if not isinstance(args[0], Context): + err = "Task expected a Context as its first arg, got {} instead!" + # TODO: raise a custom subclass _of_ TypeError instead + raise TypeError(err.format(type(args[0]))) + result = self.body(*args, **kwargs) + self.times_called += 1 + return result + + @property + def called(self) -> bool: + return self.times_called > 0 + + def argspec(self, body: Callable) -> "Signature": + """ + Returns a modified `inspect.Signature` based on that of ``body``. + + :returns: + an `inspect.Signature` matching that of ``body``, but with the + initial context argument removed. + :raises TypeError: + if the task lacks an initial positional `.Context` argument. + + .. versionadded:: 1.0 + .. versionchanged:: 2.0 + Changed from returning a two-tuple of ``(arg_names, spec_dict)`` to + returning an `inspect.Signature`. + """ + # Handle callable-but-not-function objects + func = ( + body + if isinstance(body, types.FunctionType) + else body.__call__ # type: ignore + ) + # Rebuild signature with first arg dropped, or die usefully(ish trying + sig = inspect.signature(func) + params = list(sig.parameters.values()) + # TODO: this ought to also check if an extant 1st param _was_ a Context + # arg, and yell similarly if not. + if not len(params): + # TODO: see TODO under __call__, this should be same type + raise TypeError("Tasks must have an initial Context argument!") + return sig.replace(parameters=params[1:]) + + def fill_implicit_positionals( + self, positional: Optional[Iterable[str]] + ) -> Iterable[str]: + # If positionals is None, everything lacking a default + # value will be automatically considered positional. + if positional is None: + positional = [ + x.name + for x in self.argspec(self.body).parameters.values() + if x.default is inspect.Signature.empty + ] + return positional + + def arg_opts( + self, name: str, default: str, taken_names: Set[str] + ) -> Dict[str, Any]: + opts: Dict[str, Any] = {} + # Whether it's positional or not + opts["positional"] = name in self.positional + # Whether it is a value-optional flag + opts["optional"] = name in self.optional + # Whether it should be of an iterable (list) kind + if name in self.iterable: + opts["kind"] = list + # If user gave a non-None default, hopefully they know better + # than us what they want here (and hopefully it offers the list + # protocol...) - otherwise supply useful default + opts["default"] = default if default is not None else [] + # Whether it should increment its value or not + if name in self.incrementable: + opts["incrementable"] = True + # Argument name(s) (replace w/ dashed version if underscores present, + # and move the underscored version to be the attr_name instead.) 
+ original_name = name # For reference in eg help= + if "_" in name: + opts["attr_name"] = name + name = translate_underscores(name) + names = [name] + if self.auto_shortflags: + # Must know what short names are available + for char in name: + if not (char == name or char in taken_names): + names.append(char) + break + opts["names"] = names + # Handle default value & kind if possible + if default not in (None, inspect.Signature.empty): + # TODO: allow setting 'kind' explicitly. + # NOTE: skip setting 'kind' if optional is True + type(default) is + # bool; that results in a nonsensical Argument which gives the + # parser grief in a few ways. + kind = type(default) + if not (opts["optional"] and kind is bool): + opts["kind"] = kind + opts["default"] = default + # Help + for possibility in name, original_name: + if possibility in self.help: + opts["help"] = self.help.pop(possibility) + break + return opts + + def get_arguments( + self, ignore_unknown_help: Optional[bool] = None + ) -> List[Argument]: + """ + Return a list of Argument objects representing this task's signature. + + :param bool ignore_unknown_help: + Controls whether unknown help flags cause errors. See the config + option by the same name for details. + + .. versionadded:: 1.0 + .. versionchanged:: 1.7 + Added the ``ignore_unknown_help`` kwarg. + """ + # Core argspec + sig = self.argspec(self.body) + # Prime the list of all already-taken names (mostly for help in + # choosing auto shortflags) + taken_names = set(sig.parameters.keys()) + # Build arg list (arg_opts will take care of setting up shortnames, + # etc) + args = [] + for param in sig.parameters.values(): + new_arg = Argument( + **self.arg_opts(param.name, param.default, taken_names) + ) + args.append(new_arg) + # Update taken_names list with new argument's full name list + # (which may include new shortflags) so subsequent Argument + # creation knows what's taken. + taken_names.update(set(new_arg.names)) + # If any values were leftover after consuming a 'help' dict, it implies + # the user messed up & had a typo or similar. Let's explode. + if self.help and not ignore_unknown_help: + raise ValueError( + "Help field was set for param(s) that don't exist: {}".format( + list(self.help.keys()) + ) + ) + # Now we need to ensure positionals end up in the front of the list, in + # order given in self.positionals, so that when Context consumes them, + # this order is preserved. + for posarg in reversed(list(self.positional)): + for i, arg in enumerate(args): + if arg.name == posarg: + args.insert(0, args.pop(i)) + break + return args + + +def task(*args: Any, **kwargs: Any) -> Callable: + """ + Marks wrapped callable object as a valid Invoke task. + + May be called without any parentheses if no extra options need to be + specified. Otherwise, the following keyword arguments are allowed in the + parenthese'd form: + + * ``name``: Default name to use when binding to a `.Collection`. Useful for + avoiding Python namespace issues (i.e. when the desired CLI level name + can't or shouldn't be used as the Python level name.) + * ``aliases``: Specify one or more aliases for this task, allowing it to be + invoked as multiple different names. For example, a task named ``mytask`` + with a simple ``@task`` wrapper may only be invoked as ``"mytask"``. + Changing the decorator to be ``@task(aliases=['myothertask'])`` allows + invocation as ``"mytask"`` *or* ``"myothertask"``. 
+ * ``positional``: Iterable overriding the parser's automatic "args with no + default value are considered positional" behavior. If a list of arg + names, no args besides those named in this iterable will be considered + positional. (This means that an empty list will force all arguments to be + given as explicit flags.) + * ``optional``: Iterable of argument names, declaring those args to + have :ref:`optional values `. Such arguments may be + given as value-taking options (e.g. ``--my-arg=myvalue``, wherein the + task is given ``"myvalue"``) or as Boolean flags (``--my-arg``, resulting + in ``True``). + * ``iterable``: Iterable of argument names, declaring them to :ref:`build + iterable values `. + * ``incrementable``: Iterable of argument names, declaring them to + :ref:`increment their values `. + * ``default``: Boolean option specifying whether this task should be its + collection's default task (i.e. called if the collection's own name is + given.) + * ``auto_shortflags``: Whether or not to automatically create short + flags from task options; defaults to True. + * ``help``: Dict mapping argument names to their help strings. Will be + displayed in ``--help`` output. For arguments containing underscores + (which are transformed into dashes on the CLI by default), either the + dashed or underscored version may be supplied here. + * ``pre``, ``post``: Lists of task objects to execute prior to, or after, + the wrapped task whenever it is executed. + * ``autoprint``: Boolean determining whether to automatically print this + task's return value to standard output when invoked directly via the CLI. + Defaults to False. + * ``klass``: Class to instantiate/return. Defaults to `.Task`. + + If any non-keyword arguments are given, they are taken as the value of the + ``pre`` kwarg for convenience's sake. (It is an error to give both + ``*args`` and ``pre`` at the same time.) + + .. versionadded:: 1.0 + .. versionchanged:: 1.1 + Added the ``klass`` keyword argument. + """ + klass: Type[Task] = kwargs.pop("klass", Task) + # @task -- no options were (probably) given. + if len(args) == 1 and callable(args[0]) and not isinstance(args[0], Task): + return klass(args[0], **kwargs) + # @task(pre, tasks, here) + if args: + if "pre" in kwargs: + raise TypeError( + "May not give *args and 'pre' kwarg simultaneously!" + ) + kwargs["pre"] = args + + def inner(body: Callable) -> Task[T]: + _task = klass(body, **kwargs) + return _task + + # update_wrapper(inner, klass) + return inner + + +class Call: + """ + Represents a call/execution of a `.Task` with given (kw)args. + + Similar to `~functools.partial` with some added functionality (such as the + delegation to the inner task, and optional tracking of the name it's being + called by.) + + .. versionadded:: 1.0 + """ + + def __init__( + self, + task: "Task", + called_as: Optional[str] = None, + args: Optional[Tuple[str, ...]] = None, + kwargs: Optional[Dict[str, Any]] = None, + ) -> None: + """ + Create a new `.Call` object. + + :param task: The `.Task` object to be executed. + + :param str called_as: + The name the task is being called as, e.g. if it was called by an + alias or other rebinding. Defaults to ``None``, aka, the task was + referred to by its default name. + + :param tuple args: + Positional arguments to call with, if any. Default: ``None``. + + :param dict kwargs: + Keyword arguments to call with, if any. Default: ``None``. 
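
Tying several of the decorator options listed above together, a short hypothetical ``tasks.py`` (all names and commands illustrative) might read::

    from invoke import task

    # An alias, per-argument help text, and an iterable flag, per the
    # keyword options documented on @task above.
    @task(aliases=["check"], help={"verbose": "Emit more output"}, iterable=["path"])
    def test(c, verbose=False, path=None):
        """Run the test suite."""
        args = ["-v"] if verbose else []
        args.extend(path or [])
        c.run("pytest " + " ".join(args))
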
+ """ + self.task = task + self.called_as = called_as + self.args = args or tuple() + self.kwargs = kwargs or dict() + + # TODO: just how useful is this? feels like maybe overkill magic + def __getattr__(self, name: str) -> Any: + return getattr(self.task, name) + + def __deepcopy__(self, memo: object) -> "Call": + return self.clone() + + def __repr__(self) -> str: + aka = "" + if self.called_as is not None and self.called_as != self.task.name: + aka = " (called as: {!r})".format(self.called_as) + return "<{} {!r}{}, args: {!r}, kwargs: {!r}>".format( + self.__class__.__name__, + self.task.name, + aka, + self.args, + self.kwargs, + ) + + def __eq__(self, other: object) -> bool: + # NOTE: Not comparing 'called_as'; a named call of a given Task with + # same args/kwargs should be considered same as an unnamed call of the + # same Task with the same args/kwargs (e.g. pre/post task specified w/o + # name). Ditto tasks with multiple aliases. + for attr in "task args kwargs".split(): + if getattr(self, attr) != getattr(other, attr): + return False + return True + + def make_context(self, config: "Config") -> Context: + """ + Generate a `.Context` appropriate for this call, with given config. + + .. versionadded:: 1.0 + """ + return Context(config=config) + + def clone_data(self) -> Dict[str, Any]: + """ + Return keyword args suitable for cloning this call into another. + + .. versionadded:: 1.1 + """ + return dict( + task=self.task, + called_as=self.called_as, + args=deepcopy(self.args), + kwargs=deepcopy(self.kwargs), + ) + + def clone( + self, + into: Optional[Type["Call"]] = None, + with_: Optional[Dict[str, Any]] = None, + ) -> "Call": + """ + Return a standalone copy of this Call. + + Useful when parameterizing task executions. + + :param into: + A subclass to generate instead of the current class. Optional. + + :param dict with_: + A dict of additional keyword arguments to use when creating the new + clone; typically used when cloning ``into`` a subclass that has + extra args on top of the base class. Optional. + + .. note:: + This dict is used to ``.update()`` the original object's data + (the return value from its `clone_data`), so in the event of + a conflict, values in ``with_`` will win out. + + .. versionadded:: 1.0 + .. versionchanged:: 1.1 + Added the ``with_`` kwarg. + """ + klass = into if into is not None else self.__class__ + data = self.clone_data() + if with_ is not None: + data.update(with_) + return klass(**data) + + +def call(task: "Task", *args: Any, **kwargs: Any) -> "Call": + """ + Describes execution of a `.Task`, typically with pre-supplied arguments. + + Useful for setting up :ref:`pre/post task invocations + `. It's actually just a convenient wrapper + around the `.Call` class, which may be used directly instead if desired. + + For example, here's two build-like tasks that both refer to a ``setup`` + pre-task, one with no baked-in argument values (and thus no need to use + `.call`), and one that toggles a boolean flag:: + + @task + def setup(c, clean=False): + if clean: + c.run("rm -rf target") + # ... setup things here ... + c.run("tar czvf target.tgz target") + + @task(pre=[setup]) + def build(c): + c.run("build, accounting for leftover files...") + + @task(pre=[call(setup, clean=True)]) + def clean_build(c): + c.run("build, assuming clean slate...") + + Please see the constructor docs for `.Call` for details - this function's + ``args`` and ``kwargs`` map directly to the same arguments as in that + method. + + .. 
versionadded:: 1.0 + """ + return Call(task, args=args, kwargs=kwargs) diff --git a/.venv/lib/python3.9/site-packages/invoke/terminals.py b/.venv/lib/python3.9/site-packages/invoke/terminals.py new file mode 100644 index 0000000..4151ba5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/terminals.py @@ -0,0 +1,248 @@ +""" +Utility functions surrounding terminal devices & I/O. + +Much of this code performs platform-sensitive branching, e.g. Windows support. + +This is its own module to abstract away what would otherwise be distracting +logic-flow interruptions. +""" + +from contextlib import contextmanager +from typing import Generator, IO, Optional, Tuple +import os +import select +import sys + +# TODO: move in here? They're currently platform-agnostic... +from .util import has_fileno, isatty + + +WINDOWS = sys.platform == "win32" +""" +Whether or not the current platform appears to be Windows in nature. + +Note that Cygwin's Python is actually close enough to "real" UNIXes that it +doesn't need (or want!) to use PyWin32 -- so we only test for literal Win32 +setups (vanilla Python, ActiveState etc) here. + +.. versionadded:: 1.0 +""" + +if sys.platform == "win32": + import msvcrt + from ctypes import ( + Structure, + c_ushort, + windll, + POINTER, + byref, + ) + from ctypes.wintypes import HANDLE, _COORD, _SMALL_RECT +else: + import fcntl + import struct + import termios + import tty + + +if sys.platform == "win32": + + def _pty_size() -> Tuple[Optional[int], Optional[int]]: + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_ = [ + ("dwSize", _COORD), + ("dwCursorPosition", _COORD), + ("wAttributes", c_ushort), + ("srWindow", _SMALL_RECT), + ("dwMaximumWindowSize", _COORD), + ] + + GetStdHandle = windll.kernel32.GetStdHandle + GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + GetStdHandle.restype = HANDLE + GetConsoleScreenBufferInfo.argtypes = [ + HANDLE, + POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + + hstd = GetStdHandle(-11) # STD_OUTPUT_HANDLE = -11 + csbi = CONSOLE_SCREEN_BUFFER_INFO() + ret = GetConsoleScreenBufferInfo(hstd, byref(csbi)) + + if ret: + sizex = csbi.srWindow.Right - csbi.srWindow.Left + 1 + sizey = csbi.srWindow.Bottom - csbi.srWindow.Top + 1 + return sizex, sizey + else: + return (None, None) + +else: + + def _pty_size() -> Tuple[Optional[int], Optional[int]]: + """ + Suitable for most POSIX platforms. + + .. versionadded:: 1.0 + """ + # Sentinel values to be replaced w/ defaults by caller + size = (None, None) + # We want two short unsigned integers (rows, cols) + # Note: TIOCGWINSZ struct contains 4 unsigned shorts, 2 unused + fmt = "HHHH" + # Create an empty (zeroed) buffer for ioctl to map onto. Yay for C! + buf = struct.pack(fmt, 0, 0, 0, 0) + # Call TIOCGWINSZ to get window size of stdout, returns our filled + # buffer + try: + result = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, buf) + # Unpack buffer back into Python data types + # NOTE: this unpack gives us rows x cols, but we return the + # inverse. + rows, cols, *_ = struct.unpack(fmt, result) + return (cols, rows) + # Fallback to emptyish return value in various failure cases: + # * sys.stdout being monkeypatched, such as in testing, and lacking + # * .fileno + # * sys.stdout having a .fileno but not actually being attached to a + # * TTY + # * termios not having a TIOCGWINSZ attribute (happens sometimes...) 
+ # * other situations where ioctl doesn't explode but the result isn't + # something unpack can deal with + except (struct.error, TypeError, IOError, AttributeError): + pass + return size + + +def pty_size() -> Tuple[int, int]: + """ + Determine current local pseudoterminal dimensions. + + :returns: + A ``(num_cols, num_rows)`` two-tuple describing PTY size. Defaults to + ``(80, 24)`` if unable to get a sensible result dynamically. + + .. versionadded:: 1.0 + """ + cols, rows = _pty_size() + # TODO: make defaults configurable? + return (cols or 80, rows or 24) + + +def stdin_is_foregrounded_tty(stream: IO) -> bool: + """ + Detect if given stdin ``stream`` seems to be in the foreground of a TTY. + + Specifically, compares the current Python process group ID to that of the + stream's file descriptor to see if they match; if they do not match, it is + likely that the process has been placed in the background. + + This is used as a test to determine whether we should manipulate an active + stdin so it runs in a character-buffered mode; touching the terminal in + this way when the process is backgrounded, causes most shells to pause + execution. + + .. note:: + Processes that aren't attached to a terminal to begin with, will always + fail this test, as it starts with "do you have a real ``fileno``?". + + .. versionadded:: 1.0 + """ + if not has_fileno(stream): + return False + return os.getpgrp() == os.tcgetpgrp(stream.fileno()) + + +def cbreak_already_set(stream: IO) -> bool: + # Explicitly not docstringed to remain private, for now. Eh. + # Checks whether tty.setcbreak appears to have already been run against + # ``stream`` (or if it would otherwise just not do anything). + # Used to effect idempotency for character-buffering a stream, which also + # lets us avoid multiple capture-then-restore cycles. + attrs = termios.tcgetattr(stream) + lflags, cc = attrs[3], attrs[6] + echo = bool(lflags & termios.ECHO) + icanon = bool(lflags & termios.ICANON) + # setcbreak sets ECHO and ICANON to 0/off, CC[VMIN] to 1-ish, and CC[VTIME] + # to 0-ish. If any of that is not true we can reasonably assume it has not + # yet been executed against this stream. + sentinels = ( + not echo, + not icanon, + cc[termios.VMIN] in [1, b"\x01"], + cc[termios.VTIME] in [0, b"\x00"], + ) + return all(sentinels) + + +@contextmanager +def character_buffered( + stream: IO, +) -> Generator[None, None, None]: + """ + Force local terminal ``stream`` be character, not line, buffered. + + Only applies to Unix-based systems; on Windows this is a no-op. + + .. versionadded:: 1.0 + """ + if ( + WINDOWS + or not isatty(stream) + or not stdin_is_foregrounded_tty(stream) + or cbreak_already_set(stream) + ): + yield + else: + old_settings = termios.tcgetattr(stream) + tty.setcbreak(stream) + try: + yield + finally: + termios.tcsetattr(stream, termios.TCSADRAIN, old_settings) + + +def ready_for_reading(input_: IO) -> bool: + """ + Test ``input_`` to determine whether a read action will succeed. + + :param input_: Input stream object (file-like). + + :returns: ``True`` if a read should succeed, ``False`` otherwise. + + .. versionadded:: 1.0 + """ + # A "real" terminal stdin needs select/kbhit to tell us when it's ready for + # a nonblocking read(). + # Otherwise, assume a "safer" file-like object that can be read from in a + # nonblocking fashion (e.g. a StringIO or regular file). 
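
The helpers in this module are typically combined when mirroring a subprocess' terminal; a rough sketch of direct usage, drawing only on the functions defined above::

    import sys
    from invoke.terminals import character_buffered, pty_size, ready_for_reading

    cols, rows = pty_size()  # falls back to (80, 24) when undetectable
    # character_buffered is a no-op on Windows, or when stdin isn't a
    # foregrounded TTY, or when cbreak mode is already set.
    with character_buffered(sys.stdin):
        if ready_for_reading(sys.stdin):
            data = sys.stdin.read(1)
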
+ if not has_fileno(input_): + return True + if sys.platform == "win32": + return msvcrt.kbhit() + else: + reads, _, _ = select.select([input_], [], [], 0.0) + return bool(reads and reads[0] is input_) + + +def bytes_to_read(input_: IO) -> int: + """ + Query stream ``input_`` to see how many bytes may be readable. + + .. note:: + If we are unable to tell (e.g. if ``input_`` isn't a true file + descriptor or isn't a valid TTY) we fall back to suggesting reading 1 + byte only. + + :param input: Input stream object (file-like). + + :returns: `int` number of bytes to read. + + .. versionadded:: 1.0 + """ + # NOTE: we have to check both possibilities here; situations exist where + # it's not a tty but has a fileno, or vice versa; neither is typically + # going to work re: ioctl(). + if not WINDOWS and isatty(input_) and has_fileno(input_): + fionread = fcntl.ioctl(input_, termios.FIONREAD, b" ") + return int(struct.unpack("h", fionread)[0]) + return 1 diff --git a/.venv/lib/python3.9/site-packages/invoke/util.py b/.venv/lib/python3.9/site-packages/invoke/util.py new file mode 100644 index 0000000..df29c84 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/util.py @@ -0,0 +1,268 @@ +from collections import namedtuple +from contextlib import contextmanager +from types import TracebackType +from typing import Any, Generator, List, IO, Optional, Tuple, Type, Union +import io +import logging +import os +import threading +import sys + +# NOTE: This is the canonical location for commonly-used vendored modules, +# which is the only spot that performs this try/except to allow repackaged +# Invoke to function (e.g. distro packages which unvendor the vendored bits and +# thus must import our 'vendored' stuff from the overall environment.) +# All other uses of Lexicon, etc should do 'from .util import lexicon' etc. +# Saves us from having to update the same logic in a dozen places. +# TODO: would this make more sense to put _into_ invoke.vendor? That way, the +# import lines which now read 'from .util import ' would be +# more obvious. Requires packagers to leave invoke/vendor/__init__.py alone tho +try: + from .vendor.lexicon import Lexicon # noqa + from .vendor import yaml # noqa +except ImportError: + from lexicon import Lexicon # type: ignore[no-redef] # noqa + import yaml # type: ignore[no-redef] # noqa + + +LOG_FORMAT = "%(name)s.%(module)s.%(funcName)s: %(message)s" + + +def enable_logging() -> None: + logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) + + +# Allow from-the-start debugging (vs toggled during load of tasks module) via +# shell env var. +if os.environ.get("INVOKE_DEBUG"): + enable_logging() + +# Add top level logger functions to global namespace. Meh. +log = logging.getLogger("invoke") +debug = log.debug + + +def task_name_sort_key(name: str) -> Tuple[List[str], str]: + """ + Return key tuple for use sorting dotted task names, via e.g. `sorted`. + + .. versionadded:: 1.0 + """ + parts = name.split(".") + return ( + # First group/sort by non-leaf path components. 
This keeps everything + # grouped in its hierarchy, and incidentally puts top-level tasks + # (whose non-leaf path set is the empty list) first, where we want them + parts[:-1], + # Then we sort lexicographically by the actual task name + parts[-1], + ) + + +# TODO: Make part of public API sometime +@contextmanager +def cd(where: str) -> Generator[None, None, None]: + cwd = os.getcwd() + os.chdir(where) + try: + yield + finally: + os.chdir(cwd) + + +def has_fileno(stream: IO) -> bool: + """ + Cleanly determine whether ``stream`` has a useful ``.fileno()``. + + .. note:: + This function helps determine if a given file-like object can be used + with various terminal-oriented modules and functions such as `select`, + `termios`, and `tty`. For most of those, a fileno is all that is + required; they'll function even if ``stream.isatty()`` is ``False``. + + :param stream: A file-like object. + + :returns: + ``True`` if ``stream.fileno()`` returns an integer, ``False`` otherwise + (this includes when ``stream`` lacks a ``fileno`` method). + + .. versionadded:: 1.0 + """ + try: + return isinstance(stream.fileno(), int) + except (AttributeError, io.UnsupportedOperation): + return False + + +def isatty(stream: IO) -> Union[bool, Any]: + """ + Cleanly determine whether ``stream`` is a TTY. + + Specifically, first try calling ``stream.isatty()``, and if that fails + (e.g. due to lacking the method entirely) fallback to `os.isatty`. + + .. note:: + Most of the time, we don't actually care about true TTY-ness, but + merely whether the stream seems to have a fileno (per `has_fileno`). + However, in some cases (notably the use of `pty.fork` to present a + local pseudoterminal) we need to tell if a given stream has a valid + fileno but *isn't* tied to an actual terminal. Thus, this function. + + :param stream: A file-like object. + + :returns: + A boolean depending on the result of calling ``.isatty()`` and/or + `os.isatty`. + + .. versionadded:: 1.0 + """ + # If there *is* an .isatty, ask it. + if hasattr(stream, "isatty") and callable(stream.isatty): + return stream.isatty() + # If there wasn't, see if it has a fileno, and if so, ask os.isatty + elif has_fileno(stream): + return os.isatty(stream.fileno()) + # If we got here, none of the above worked, so it's reasonable to assume + # the darn thing isn't a real TTY. + return False + + +def helpline(obj: object) -> Optional[str]: + """ + Yield an object's first docstring line, or None if there was no docstring. + + .. versionadded:: 1.0 + """ + docstring = obj.__doc__ + if ( + not docstring + or not docstring.strip() + or docstring == type(obj).__doc__ + ): + return None + return docstring.lstrip().splitlines()[0] + + +class ExceptionHandlingThread(threading.Thread): + """ + Thread handler making it easier for parent to handle thread exceptions. + + Based in part on Fabric 1's ThreadHandler. See also Fabric GH issue #204. + + When used directly, can be used in place of a regular ``threading.Thread``. + If subclassed, the subclass must do one of: + + - supply ``target`` to ``__init__`` + - define ``_run()`` instead of ``run()`` + + This is because this thread's entire point is to wrap behavior around the + thread's execution; subclasses could not redefine ``run()`` without + breaking that functionality. + + .. versionadded:: 1.0 + """ + + def __init__(self, **kwargs: Any) -> None: + """ + Create a new exception-handling thread instance. 
+ + Takes all regular `threading.Thread` keyword arguments, via + ``**kwargs`` for easier display of thread identity when raising + captured exceptions. + """ + super().__init__(**kwargs) + # No record of why, but Fabric used daemon threads ever since the + # switch from select.select, so let's keep doing that. + self.daemon = True + # Track exceptions raised in run() + self.kwargs = kwargs + # TODO: legacy cruft that needs to be removed + self.exc_info: Optional[ + Union[ + Tuple[Type[BaseException], BaseException, TracebackType], + Tuple[None, None, None], + ] + ] = None + + def run(self) -> None: + try: + # Allow subclasses implemented using the "override run()'s body" + # approach to work, by using _run() instead of run(). If that + # doesn't appear to be the case, then assume we're being used + # directly and just use super() ourselves. + # XXX https://github.com/python/mypy/issues/1424 + if hasattr(self, "_run") and callable(self._run): # type: ignore + # TODO: this could be: + # - io worker with no 'result' (always local) + # - tunnel worker, also with no 'result' (also always local) + # - threaded concurrent run(), sudo(), put(), etc, with a + # result (not necessarily local; might want to be a subproc or + # whatever eventually) + # TODO: so how best to conditionally add a "capture result + # value of some kind"? + # - update so all use cases use subclassing, add functionality + # alongside self.exception() that is for the result of _run() + # - split out class that does not care about result of _run() + # and let it continue acting like a normal thread (meh) + # - assume the run/sudo/etc case will use a queue inside its + # worker body, orthogonal to how exception handling works + self._run() # type: ignore + else: + super().run() + except BaseException: + # Store for actual reraising later + self.exc_info = sys.exc_info() + # And log now, in case we never get to later (e.g. if executing + # program is hung waiting for us to do something) + msg = "Encountered exception {!r} in thread for {!r}" + # Name is either target function's dunder-name, or just "_run" if + # we were run subclass-wise. + name = "_run" + if "target" in self.kwargs: + name = self.kwargs["target"].__name__ + debug(msg.format(self.exc_info[1], name)) # noqa + + def exception(self) -> Optional["ExceptionWrapper"]: + """ + If an exception occurred, return an `.ExceptionWrapper` around it. + + :returns: + An `.ExceptionWrapper` managing the result of `sys.exc_info`, if an + exception was raised during thread execution. If no exception + occurred, returns ``None`` instead. + + .. versionadded:: 1.0 + """ + if self.exc_info is None: + return None + return ExceptionWrapper(self.kwargs, *self.exc_info) + + @property + def is_dead(self) -> bool: + """ + Returns ``True`` if not alive and has a stored exception. + + Used to detect threads that have excepted & shut down. + + .. versionadded:: 1.0 + """ + # NOTE: it seems highly unlikely that a thread could still be + # is_alive() but also have encountered an exception. But hey. Why not + # be thorough? + return (not self.is_alive()) and self.exc_info is not None + + def __repr__(self) -> str: + # TODO: beef this up more + return str(self.kwargs["target"].__name__) + + +# NOTE: ExceptionWrapper defined here, not in exceptions.py, to avoid circular +# dependency issues (e.g. Failure subclasses need to use some bits from this +# module...) +#: A namedtuple wrapping a thread-borne exception & that thread's arguments. 
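
Based on the thread wrapper above, parent code generally starts such threads, joins them, then inspects ``exception()``; a condensed sketch (the worker function and path are purely illustrative)::

    from invoke.util import ExceptionHandlingThread

    def worker(path):
        open(path)  # may raise OSError; it is captured, not propagated

    t = ExceptionHandlingThread(target=worker, kwargs={"path": "/does/not/exist"})
    t.start()
    t.join()
    wrapper = t.exception()  # ExceptionWrapper namedtuple, or None
    if wrapper is not None:
        print(wrapper.type, wrapper.value, wrapper.kwargs)
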
+#: Mostly used as an intermediate between `.ExceptionHandlingThread` (which +#: preserves initial exceptions) and `.ThreadException` (which holds 1..N such +#: exceptions, as typically multiple threads are involved.) +ExceptionWrapper = namedtuple( + "ExceptionWrapper", "kwargs type value traceback" +) diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/__init__.py b/.venv/lib/python3.9/site-packages/invoke/vendor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..7b24a8c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__init__.py b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__init__.py new file mode 100644 index 0000000..3339fef --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__init__.py @@ -0,0 +1,4 @@ +from .machine import (StateMachine, state, transition, + InvalidConfiguration, InvalidTransition, + GuardNotSatisfied, ForkedTransition) + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..f8184bd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/backwardscompat.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/backwardscompat.cpython-39.pyc new file mode 100644 index 0000000..14921fc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/backwardscompat.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/machine.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/machine.cpython-39.pyc new file mode 100644 index 0000000..b38ed4d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/__pycache__/machine.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/backwardscompat.py b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/backwardscompat.py new file mode 100644 index 0000000..88eac4f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/backwardscompat.py @@ -0,0 +1,8 @@ +import sys + +if sys.version_info >= (3,): + def callable(obj): + return hasattr(obj, '__call__') +else: + callable = callable + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/machine.py b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/machine.py new file mode 100644 index 0000000..da9fdda --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/fluidity/machine.py @@ -0,0 +1,270 @@ +import re +import inspect +from .backwardscompat import callable + +# metaclass implementation idea from +# http://blog.ianbicking.org/more-on-python-metaprogramming-comment-14.html +_transition_gatherer = [] + +def transition(event, from_, to, action=None, guard=None): + _transition_gatherer.append([event, from_, to, action, guard]) + +_state_gatherer = [] + +def state(name, 
enter=None, exit=None): + _state_gatherer.append([name, enter, exit]) + + +class MetaStateMachine(type): + + def __new__(cls, name, bases, dictionary): + global _transition_gatherer, _state_gatherer + Machine = super(MetaStateMachine, cls).__new__(cls, name, bases, dictionary) + Machine._class_transitions = [] + Machine._class_states = {} + for s in _state_gatherer: + Machine._add_class_state(*s) + for i in _transition_gatherer: + Machine._add_class_transition(*i) + _transition_gatherer = [] + _state_gatherer = [] + return Machine + + +StateMachineBase = MetaStateMachine('StateMachineBase', (object, ), {}) + + +class StateMachine(StateMachineBase): + + def __init__(self): + self._bring_definitions_to_object_level() + self._inject_into_parts() + self._validate_machine_definitions() + if callable(self.initial_state): + self.initial_state = self.initial_state() + self._current_state_object = self._state_by_name(self.initial_state) + self._current_state_object.run_enter(self) + self._create_state_getters() + + def __new__(cls, *args, **kwargs): + obj = super(StateMachine, cls).__new__(cls) + obj._states = {} + obj._transitions = [] + return obj + + def _bring_definitions_to_object_level(self): + self._states.update(self.__class__._class_states) + self._transitions.extend(self.__class__._class_transitions) + + def _inject_into_parts(self): + for collection in [self._states.values(), self._transitions]: + for component in collection: + component.machine = self + + def _validate_machine_definitions(self): + if len(self._states) < 2: + raise InvalidConfiguration('There must be at least two states') + if not getattr(self, 'initial_state', None): + raise InvalidConfiguration('There must exist an initial state') + + @classmethod + def _add_class_state(cls, name, enter, exit): + cls._class_states[name] = _State(name, enter, exit) + + def add_state(self, name, enter=None, exit=None): + state = _State(name, enter, exit) + setattr(self, state.getter_name(), state.getter_method().__get__(self, self.__class__)) + self._states[name] = state + + def _current_state_name(self): + return self._current_state_object.name + + current_state = property(_current_state_name) + + def changing_state(self, from_, to): + """ + This method is called whenever a state change is executed + """ + pass + + def _new_state(self, state): + self.changing_state(self._current_state_object.name, state.name) + self._current_state_object = state + + def _state_objects(self): + return list(self._states.values()) + + def states(self): + return [s.name for s in self._state_objects()] + + @classmethod + def _add_class_transition(cls, event, from_, to, action, guard): + transition = _Transition(event, [cls._class_states[s] for s in _listize(from_)], + cls._class_states[to], action, guard) + cls._class_transitions.append(transition) + setattr(cls, event, transition.event_method()) + + def add_transition(self, event, from_, to, action=None, guard=None): + transition = _Transition(event, [self._state_by_name(s) for s in _listize(from_)], + self._state_by_name(to), action, guard) + self._transitions.append(transition) + setattr(self, event, transition.event_method().__get__(self, self.__class__)) + + def _process_transitions(self, event_name, *args, **kwargs): + transitions = self._transitions_by_name(event_name) + transitions = self._ensure_from_validity(transitions) + this_transition = self._check_guards(transitions) + this_transition.run(self, *args, **kwargs) + + def _create_state_getters(self): + for state in self._state_objects(): + 
setattr(self, state.getter_name(), state.getter_method().__get__(self, self.__class__)) + + def _state_by_name(self, name): + for state in self._state_objects(): + if state.name == name: + return state + + def _transitions_by_name(self, name): + return list(filter(lambda transition: transition.event == name, self._transitions)) + + def _ensure_from_validity(self, transitions): + valid_transitions = list(filter( + lambda transition: transition.is_valid_from(self._current_state_object), + transitions)) + if len(valid_transitions) == 0: + raise InvalidTransition("Cannot %s from %s" % ( + transitions[0].event, self.current_state)) + return valid_transitions + + def _check_guards(self, transitions): + allowed_transitions = [] + for transition in transitions: + if transition.check_guard(self): + allowed_transitions.append(transition) + if len(allowed_transitions) == 0: + raise GuardNotSatisfied("Guard is not satisfied for this transition") + elif len(allowed_transitions) > 1: + raise ForkedTransition("More than one transition was allowed for this event") + return allowed_transitions[0] + + +class _Transition(object): + + def __init__(self, event, from_, to, action, guard): + self.event = event + self.from_ = from_ + self.to = to + self.action = action + self.guard = _Guard(guard) + + def event_method(self): + def generated_event(machine, *args, **kwargs): + these_transitions = machine._process_transitions(self.event, *args, **kwargs) + generated_event.__doc__ = 'event %s' % self.event + generated_event.__name__ = self.event + return generated_event + + def is_valid_from(self, from_): + return from_ in _listize(self.from_) + + def check_guard(self, machine): + return self.guard.check(machine) + + def run(self, machine, *args, **kwargs): + machine._current_state_object.run_exit(machine) + machine._new_state(self.to) + self.to.run_enter(machine) + _ActionRunner(machine).run(self.action, *args, **kwargs) + + +class _Guard(object): + + def __init__(self, action): + self.action = action + + def check(self, machine): + if self.action is None: + return True + items = _listize(self.action) + result = True + for item in items: + result = result and self._evaluate(machine, item) + return result + + def _evaluate(self, machine, item): + if callable(item): + return item(machine) + else: + guard = getattr(machine, item) + if callable(guard): + guard = guard() + return guard + + +class _State(object): + + def __init__(self, name, enter, exit): + self.name = name + self.enter = enter + self.exit = exit + + def getter_name(self): + return 'is_%s' % self.name + + def getter_method(self): + def state_getter(self_machine): + return self_machine.current_state == self.name + return state_getter + + def run_enter(self, machine): + _ActionRunner(machine).run(self.enter) + + def run_exit(self, machine): + _ActionRunner(machine).run(self.exit) + + +class _ActionRunner(object): + + def __init__(self, machine): + self.machine = machine + + def run(self, action_param, *args, **kwargs): + if not action_param: + return + action_items = _listize(action_param) + for action_item in action_items: + self._run_action(action_item, *args, **kwargs) + + def _run_action(self, action, *args, **kwargs): + if callable(action): + self._try_to_run_with_args(action, self.machine, *args, **kwargs) + else: + self._try_to_run_with_args(getattr(self.machine, action), *args, **kwargs) + + def _try_to_run_with_args(self, action, *args, **kwargs): + try: + action(*args, **kwargs) + except TypeError: + action() + + +class 
InvalidConfiguration(Exception): + pass + + +class InvalidTransition(Exception): + pass + + +class GuardNotSatisfied(Exception): + pass + + +class ForkedTransition(Exception): + pass + + +def _listize(value): + return type(value) in [list, tuple] and value or [value] + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__init__.py b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__init__.py new file mode 100644 index 0000000..c7f65d3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__init__.py @@ -0,0 +1,24 @@ +from ._version import __version_info__, __version__ # noqa +from .attribute_dict import AttributeDict +from .alias_dict import AliasDict + + +class Lexicon(AttributeDict, AliasDict): + def __init__(self, *args, **kwargs): + # Need to avoid combining AliasDict's initial attribute write on + # self.aliases, with AttributeDict's __setattr__. Doing so results in + # an infinite loop. Instead, just skip straight to dict() for both + # explicitly (i.e. we override AliasDict.__init__ instead of extending + # it.) + # NOTE: could tickle AttributeDict.__init__ instead, in case it ever + # grows one. + dict.__init__(self, *args, **kwargs) + dict.__setattr__(self, "aliases", {}) + + def __getattr__(self, key): + # Intercept deepcopy/etc driven access to self.aliases when not + # actually set. (Only a problem for us, due to abovementioned combo of + # Alias and Attribute Dicts, so not solvable in a parent alone.) + if key == "aliases" and key not in self.__dict__: + self.__dict__[key] = {} + return super(Lexicon, self).__getattr__(key) diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..784a8d1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/_version.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/_version.cpython-39.pyc new file mode 100644 index 0000000..728a6f8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/_version.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/alias_dict.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/alias_dict.cpython-39.pyc new file mode 100644 index 0000000..78499b8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/alias_dict.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/attribute_dict.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/attribute_dict.cpython-39.pyc new file mode 100644 index 0000000..cdc7f17 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/__pycache__/attribute_dict.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/_version.py b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/_version.py new file mode 100644 index 0000000..f55a4f1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/_version.py @@ -0,0 +1,2 @@ +__version_info__ = (2, 0, 1) +__version__ = ".".join(map(str, __version_info__)) diff --git 
a/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/alias_dict.py b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/alias_dict.py new file mode 100644 index 0000000..f2191fb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/alias_dict.py @@ -0,0 +1,95 @@ +class AliasDict(dict): + def __init__(self, *args, **kwargs): + super(AliasDict, self).__init__(*args, **kwargs) + self.aliases = {} + + def alias(self, from_, to): + self.aliases[from_] = to + + def unalias(self, from_): + del self.aliases[from_] + + def aliases_of(self, name): + """ + Returns other names for given real key or alias ``name``. + + If given a real key, returns its aliases. + + If given an alias, returns the real key it points to, plus any other + aliases of that real key. (The given alias itself is not included in + the return value.) + """ + names = [] + key = name + # self.aliases keys are aliases, not realkeys. Easy test to see if we + # should flip around to the POV of a realkey when given an alias. + if name in self.aliases: + key = self.aliases[name] + # Ensure the real key shows up in output. + names.append(key) + # 'key' is now a realkey, whose aliases are all keys whose value is + # itself. Filter out the original name given. + names.extend( + [k for k, v in self.aliases.items() if v == key and k != name] + ) + return names + + def _handle(self, key, value, single, multi, unaliased): + # Attribute existence test required to not blow up when deepcopy'd + if key in getattr(self, "aliases", {}): + target = self.aliases[key] + # Single-string targets + if isinstance(target, str): + return single(self, target, value) + # Multi-string targets + else: + if multi: + return multi(self, target, value) + else: + for subkey in target: + single(self, subkey, value) + else: + return unaliased(self, key, value) + + def __setitem__(self, key, value): + def single(d, target, value): + d[target] = value + + def unaliased(d, key, value): + super(AliasDict, d).__setitem__(key, value) + + return self._handle(key, value, single, None, unaliased) + + def __getitem__(self, key): + def single(d, target, value): + return d[target] + + def unaliased(d, key, value): + return super(AliasDict, d).__getitem__(key) + + def multi(d, target, value): + msg = "Multi-target aliases have no well-defined value and can't be read." 
# noqa + raise ValueError(msg) + + return self._handle(key, None, single, multi, unaliased) + + def __contains__(self, key): + def single(d, target, value): + return target in d + + def multi(d, target, value): + return all(subkey in self for subkey in self.aliases[key]) + + def unaliased(d, key, value): + return super(AliasDict, d).__contains__(key) + + return self._handle(key, None, single, multi, unaliased) + + def __delitem__(self, key): + def single(d, target, value): + del d[target] + + def unaliased(d, key, value): + return super(AliasDict, d).__delitem__(key) + + return self._handle(key, None, single, None, unaliased) diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/attribute_dict.py b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/attribute_dict.py new file mode 100644 index 0000000..5d09f13 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/lexicon/attribute_dict.py @@ -0,0 +1,16 @@ +class AttributeDict(dict): + def __getattr__(self, key): + try: + return self[key] + except KeyError: + # to conform with __getattr__ spec + raise AttributeError(key) + + def __setattr__(self, key, value): + self[key] = value + + def __delattr__(self, key): + del self[key] + + def __dir__(self): + return dir(type(self)) + list(self.keys()) diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__init__.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__init__.py new file mode 100644 index 0000000..86d07b5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__init__.py @@ -0,0 +1,427 @@ + +from .error import * + +from .tokens import * +from .events import * +from .nodes import * + +from .loader import * +from .dumper import * + +__version__ = '5.4.1' +try: + from .cyaml import * + __with_libyaml__ = True +except ImportError: + __with_libyaml__ = False + +import io + +#------------------------------------------------------------------------------ +# Warnings control +#------------------------------------------------------------------------------ + +# 'Global' warnings state: +_warnings_enabled = { + 'YAMLLoadWarning': True, +} + +# Get or set global warnings' state +def warnings(settings=None): + if settings is None: + return _warnings_enabled + + if type(settings) is dict: + for key in settings: + if key in _warnings_enabled: + _warnings_enabled[key] = settings[key] + +# Warn when load() is called without Loader=... +class YAMLLoadWarning(RuntimeWarning): + pass + +def load_warning(method): + if _warnings_enabled['YAMLLoadWarning'] is False: + return + + import warnings + + message = ( + "calling yaml.%s() without Loader=... is deprecated, as the " + "default Loader is unsafe. Please read " + "https://msg.pyyaml.org/load for full details." + ) % method + + warnings.warn(message, YAMLLoadWarning, stacklevel=3) + +#------------------------------------------------------------------------------ +def scan(stream, Loader=Loader): + """ + Scan a YAML stream and produce scanning tokens. + """ + loader = Loader(stream) + try: + while loader.check_token(): + yield loader.get_token() + finally: + loader.dispose() + +def parse(stream, Loader=Loader): + """ + Parse a YAML stream and produce parsing events. + """ + loader = Loader(stream) + try: + while loader.check_event(): + yield loader.get_event() + finally: + loader.dispose() + +def compose(stream, Loader=Loader): + """ + Parse the first YAML document in a stream + and produce the corresponding representation tree. 
+ """ + loader = Loader(stream) + try: + return loader.get_single_node() + finally: + loader.dispose() + +def compose_all(stream, Loader=Loader): + """ + Parse all YAML documents in a stream + and produce corresponding representation trees. + """ + loader = Loader(stream) + try: + while loader.check_node(): + yield loader.get_node() + finally: + loader.dispose() + +def load(stream, Loader=None): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + """ + if Loader is None: + load_warning('load') + Loader = FullLoader + + loader = Loader(stream) + try: + return loader.get_single_data() + finally: + loader.dispose() + +def load_all(stream, Loader=None): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + """ + if Loader is None: + load_warning('load_all') + Loader = FullLoader + + loader = Loader(stream) + try: + while loader.check_data(): + yield loader.get_data() + finally: + loader.dispose() + +def full_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve all tags except those known to be + unsafe on untrusted input. + """ + return load(stream, FullLoader) + +def full_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve all tags except those known to be + unsafe on untrusted input. + """ + return load_all(stream, FullLoader) + +def safe_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve only basic YAML tags. This is known + to be safe for untrusted input. + """ + return load(stream, SafeLoader) + +def safe_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve only basic YAML tags. This is known + to be safe for untrusted input. + """ + return load_all(stream, SafeLoader) + +def unsafe_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + + Resolve all tags, even those known to be + unsafe on untrusted input. + """ + return load(stream, UnsafeLoader) + +def unsafe_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + + Resolve all tags, even those known to be + unsafe on untrusted input. + """ + return load_all(stream, UnsafeLoader) + +def emit(events, stream=None, Dumper=Dumper, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None): + """ + Emit YAML parsing events into a stream. + If stream is None, return the produced string instead. + """ + getvalue = None + if stream is None: + stream = io.StringIO() + getvalue = stream.getvalue + dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + try: + for event in events: + dumper.emit(event) + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def serialize_all(nodes, stream=None, Dumper=Dumper, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None): + """ + Serialize a sequence of representation trees into a YAML stream. + If stream is None, return the produced string instead. 
+ """ + getvalue = None + if stream is None: + if encoding is None: + stream = io.StringIO() + else: + stream = io.BytesIO() + getvalue = stream.getvalue + dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break, + encoding=encoding, version=version, tags=tags, + explicit_start=explicit_start, explicit_end=explicit_end) + try: + dumper.open() + for node in nodes: + dumper.serialize(node) + dumper.close() + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def serialize(node, stream=None, Dumper=Dumper, **kwds): + """ + Serialize a representation tree into a YAML stream. + If stream is None, return the produced string instead. + """ + return serialize_all([node], stream, Dumper=Dumper, **kwds) + +def dump_all(documents, stream=None, Dumper=Dumper, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + """ + Serialize a sequence of Python objects into a YAML stream. + If stream is None, return the produced string instead. + """ + getvalue = None + if stream is None: + if encoding is None: + stream = io.StringIO() + else: + stream = io.BytesIO() + getvalue = stream.getvalue + dumper = Dumper(stream, default_style=default_style, + default_flow_style=default_flow_style, + canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break, + encoding=encoding, version=version, tags=tags, + explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys) + try: + dumper.open() + for data in documents: + dumper.represent(data) + dumper.close() + finally: + dumper.dispose() + if getvalue: + return getvalue() + +def dump(data, stream=None, Dumper=Dumper, **kwds): + """ + Serialize a Python object into a YAML stream. + If stream is None, return the produced string instead. + """ + return dump_all([data], stream, Dumper=Dumper, **kwds) + +def safe_dump_all(documents, stream=None, **kwds): + """ + Serialize a sequence of Python objects into a YAML stream. + Produce only basic YAML tags. + If stream is None, return the produced string instead. + """ + return dump_all(documents, stream, Dumper=SafeDumper, **kwds) + +def safe_dump(data, stream=None, **kwds): + """ + Serialize a Python object into a YAML stream. + Produce only basic YAML tags. + If stream is None, return the produced string instead. + """ + return dump_all([data], stream, Dumper=SafeDumper, **kwds) + +def add_implicit_resolver(tag, regexp, first=None, + Loader=None, Dumper=Dumper): + """ + Add an implicit scalar detector. + If an implicit scalar value matches the given regexp, + the corresponding tag is assigned to the scalar. + first is a sequence of possible initial characters or None. + """ + if Loader is None: + loader.Loader.add_implicit_resolver(tag, regexp, first) + loader.FullLoader.add_implicit_resolver(tag, regexp, first) + loader.UnsafeLoader.add_implicit_resolver(tag, regexp, first) + else: + Loader.add_implicit_resolver(tag, regexp, first) + Dumper.add_implicit_resolver(tag, regexp, first) + +def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper): + """ + Add a path based resolver for the given tag. + A path is a list of keys that forms a path + to a node in the representation tree. + Keys can be string values, integers, or None. 
+ """ + if Loader is None: + loader.Loader.add_path_resolver(tag, path, kind) + loader.FullLoader.add_path_resolver(tag, path, kind) + loader.UnsafeLoader.add_path_resolver(tag, path, kind) + else: + Loader.add_path_resolver(tag, path, kind) + Dumper.add_path_resolver(tag, path, kind) + +def add_constructor(tag, constructor, Loader=None): + """ + Add a constructor for the given tag. + Constructor is a function that accepts a Loader instance + and a node object and produces the corresponding Python object. + """ + if Loader is None: + loader.Loader.add_constructor(tag, constructor) + loader.FullLoader.add_constructor(tag, constructor) + loader.UnsafeLoader.add_constructor(tag, constructor) + else: + Loader.add_constructor(tag, constructor) + +def add_multi_constructor(tag_prefix, multi_constructor, Loader=None): + """ + Add a multi-constructor for the given tag prefix. + Multi-constructor is called for a node if its tag starts with tag_prefix. + Multi-constructor accepts a Loader instance, a tag suffix, + and a node object and produces the corresponding Python object. + """ + if Loader is None: + loader.Loader.add_multi_constructor(tag_prefix, multi_constructor) + loader.FullLoader.add_multi_constructor(tag_prefix, multi_constructor) + loader.UnsafeLoader.add_multi_constructor(tag_prefix, multi_constructor) + else: + Loader.add_multi_constructor(tag_prefix, multi_constructor) + +def add_representer(data_type, representer, Dumper=Dumper): + """ + Add a representer for the given type. + Representer is a function accepting a Dumper instance + and an instance of the given data type + and producing the corresponding representation node. + """ + Dumper.add_representer(data_type, representer) + +def add_multi_representer(data_type, multi_representer, Dumper=Dumper): + """ + Add a representer for the given type. + Multi-representer is a function accepting a Dumper instance + and an instance of the given data type or subtype + and producing the corresponding representation node. + """ + Dumper.add_multi_representer(data_type, multi_representer) + +class YAMLObjectMetaclass(type): + """ + The metaclass for YAMLObject. + """ + def __init__(cls, name, bases, kwds): + super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) + if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: + if isinstance(cls.yaml_loader, list): + for loader in cls.yaml_loader: + loader.add_constructor(cls.yaml_tag, cls.from_yaml) + else: + cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) + + cls.yaml_dumper.add_representer(cls, cls.to_yaml) + +class YAMLObject(metaclass=YAMLObjectMetaclass): + """ + An object that can dump itself to a YAML stream + and load itself from a YAML stream. + """ + + __slots__ = () # no direct instantiation, so allow immutable subclasses + + yaml_loader = [Loader, FullLoader, UnsafeLoader] + yaml_dumper = Dumper + + yaml_tag = None + yaml_flow_style = None + + @classmethod + def from_yaml(cls, loader, node): + """ + Convert a representation node to a Python object. + """ + return loader.construct_yaml_object(node, cls) + + @classmethod + def to_yaml(cls, dumper, data): + """ + Convert a Python object to a representation node. 
+ """ + return dumper.represent_yaml_object(cls.yaml_tag, data, cls, + flow_style=cls.yaml_flow_style) + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..1a35411 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/composer.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/composer.cpython-39.pyc new file mode 100644 index 0000000..0efa28b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/composer.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/constructor.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/constructor.cpython-39.pyc new file mode 100644 index 0000000..a53908e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/constructor.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/cyaml.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/cyaml.cpython-39.pyc new file mode 100644 index 0000000..ae29d04 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/cyaml.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/dumper.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/dumper.cpython-39.pyc new file mode 100644 index 0000000..054b139 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/dumper.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/emitter.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/emitter.cpython-39.pyc new file mode 100644 index 0000000..1edce16 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/emitter.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/error.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/error.cpython-39.pyc new file mode 100644 index 0000000..188088d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/error.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/events.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/events.cpython-39.pyc new file mode 100644 index 0000000..f0924b1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/events.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/loader.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/loader.cpython-39.pyc new file mode 100644 index 0000000..cce5b54 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/loader.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/nodes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/nodes.cpython-39.pyc new file mode 100644 index 
0000000..29659ae Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/nodes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/parser.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/parser.cpython-39.pyc new file mode 100644 index 0000000..8466ab5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/parser.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/reader.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/reader.cpython-39.pyc new file mode 100644 index 0000000..c8ef3e8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/reader.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/representer.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/representer.cpython-39.pyc new file mode 100644 index 0000000..558b1de Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/representer.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/resolver.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/resolver.cpython-39.pyc new file mode 100644 index 0000000..4b1e142 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/resolver.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/scanner.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/scanner.cpython-39.pyc new file mode 100644 index 0000000..33e8647 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/scanner.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/serializer.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/serializer.cpython-39.pyc new file mode 100644 index 0000000..0df198d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/serializer.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/tokens.cpython-39.pyc b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/tokens.cpython-39.pyc new file mode 100644 index 0000000..b682d72 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/__pycache__/tokens.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/composer.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/composer.py new file mode 100644 index 0000000..6d15cb4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/composer.py @@ -0,0 +1,139 @@ + +__all__ = ['Composer', 'ComposerError'] + +from .error import MarkedYAMLError +from .events import * +from .nodes import * + +class ComposerError(MarkedYAMLError): + pass + +class Composer: + + def __init__(self): + self.anchors = {} + + def check_node(self): + # Drop the STREAM-START event. + if self.check_event(StreamStartEvent): + self.get_event() + + # If there are more documents available? + return not self.check_event(StreamEndEvent) + + def get_node(self): + # Get the root node of the next document. 
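The Composer defined here is normally reached through the package-level compose() helper shown earlier; a small illustrative sketch (not part of the patched file, assuming the vendored copy is imported as below):

    from invoke.vendor import yaml

    root = yaml.compose("name: demo\nsteps: [build, test]")
    # root is a MappingNode whose value is a list of (key_node, value_node) pairs.
    key_node, value_node = root.value[0]
    assert key_node.value == "name" and value_node.value == "demo"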
+ if not self.check_event(StreamEndEvent): + return self.compose_document() + + def get_single_node(self): + # Drop the STREAM-START event. + self.get_event() + + # Compose a document if the stream is not empty. + document = None + if not self.check_event(StreamEndEvent): + document = self.compose_document() + + # Ensure that the stream contains no more documents. + if not self.check_event(StreamEndEvent): + event = self.get_event() + raise ComposerError("expected a single document in the stream", + document.start_mark, "but found another document", + event.start_mark) + + # Drop the STREAM-END event. + self.get_event() + + return document + + def compose_document(self): + # Drop the DOCUMENT-START event. + self.get_event() + + # Compose the root node. + node = self.compose_node(None, None) + + # Drop the DOCUMENT-END event. + self.get_event() + + self.anchors = {} + return node + + def compose_node(self, parent, index): + if self.check_event(AliasEvent): + event = self.get_event() + anchor = event.anchor + if anchor not in self.anchors: + raise ComposerError(None, None, "found undefined alias %r" + % anchor, event.start_mark) + return self.anchors[anchor] + event = self.peek_event() + anchor = event.anchor + if anchor is not None: + if anchor in self.anchors: + raise ComposerError("found duplicate anchor %r; first occurrence" + % anchor, self.anchors[anchor].start_mark, + "second occurrence", event.start_mark) + self.descend_resolver(parent, index) + if self.check_event(ScalarEvent): + node = self.compose_scalar_node(anchor) + elif self.check_event(SequenceStartEvent): + node = self.compose_sequence_node(anchor) + elif self.check_event(MappingStartEvent): + node = self.compose_mapping_node(anchor) + self.ascend_resolver() + return node + + def compose_scalar_node(self, anchor): + event = self.get_event() + tag = event.tag + if tag is None or tag == '!': + tag = self.resolve(ScalarNode, event.value, event.implicit) + node = ScalarNode(tag, event.value, + event.start_mark, event.end_mark, style=event.style) + if anchor is not None: + self.anchors[anchor] = node + return node + + def compose_sequence_node(self, anchor): + start_event = self.get_event() + tag = start_event.tag + if tag is None or tag == '!': + tag = self.resolve(SequenceNode, None, start_event.implicit) + node = SequenceNode(tag, [], + start_event.start_mark, None, + flow_style=start_event.flow_style) + if anchor is not None: + self.anchors[anchor] = node + index = 0 + while not self.check_event(SequenceEndEvent): + node.value.append(self.compose_node(node, index)) + index += 1 + end_event = self.get_event() + node.end_mark = end_event.end_mark + return node + + def compose_mapping_node(self, anchor): + start_event = self.get_event() + tag = start_event.tag + if tag is None or tag == '!': + tag = self.resolve(MappingNode, None, start_event.implicit) + node = MappingNode(tag, [], + start_event.start_mark, None, + flow_style=start_event.flow_style) + if anchor is not None: + self.anchors[anchor] = node + while not self.check_event(MappingEndEvent): + #key_event = self.peek_event() + item_key = self.compose_node(node, None) + #if item_key in node.value: + # raise ComposerError("while composing a mapping", start_event.start_mark, + # "found duplicate key", key_event.start_mark) + item_value = self.compose_node(node, item_key) + #node.value[item_key] = item_value + node.value.append((item_key, item_value)) + end_event = self.get_event() + node.end_mark = end_event.end_mark + return node + diff --git 
a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/constructor.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/constructor.py new file mode 100644 index 0000000..619acd3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/constructor.py @@ -0,0 +1,748 @@ + +__all__ = [ + 'BaseConstructor', + 'SafeConstructor', + 'FullConstructor', + 'UnsafeConstructor', + 'Constructor', + 'ConstructorError' +] + +from .error import * +from .nodes import * + +import collections.abc, datetime, base64, binascii, re, sys, types + +class ConstructorError(MarkedYAMLError): + pass + +class BaseConstructor: + + yaml_constructors = {} + yaml_multi_constructors = {} + + def __init__(self): + self.constructed_objects = {} + self.recursive_objects = {} + self.state_generators = [] + self.deep_construct = False + + def check_data(self): + # If there are more documents available? + return self.check_node() + + def check_state_key(self, key): + """Block special attributes/methods from being set in a newly created + object, to prevent user-controlled methods from being called during + deserialization""" + if self.get_state_keys_blacklist_regexp().match(key): + raise ConstructorError(None, None, + "blacklisted key '%s' in instance state found" % (key,), None) + + def get_data(self): + # Construct and return the next document. + if self.check_node(): + return self.construct_document(self.get_node()) + + def get_single_data(self): + # Ensure that the stream contains a single document and construct it. + node = self.get_single_node() + if node is not None: + return self.construct_document(node) + return None + + def construct_document(self, node): + data = self.construct_object(node) + while self.state_generators: + state_generators = self.state_generators + self.state_generators = [] + for generator in state_generators: + for dummy in generator: + pass + self.constructed_objects = {} + self.recursive_objects = {} + self.deep_construct = False + return data + + def construct_object(self, node, deep=False): + if node in self.constructed_objects: + return self.constructed_objects[node] + if deep: + old_deep = self.deep_construct + self.deep_construct = True + if node in self.recursive_objects: + raise ConstructorError(None, None, + "found unconstructable recursive node", node.start_mark) + self.recursive_objects[node] = None + constructor = None + tag_suffix = None + if node.tag in self.yaml_constructors: + constructor = self.yaml_constructors[node.tag] + else: + for tag_prefix in self.yaml_multi_constructors: + if tag_prefix is not None and node.tag.startswith(tag_prefix): + tag_suffix = node.tag[len(tag_prefix):] + constructor = self.yaml_multi_constructors[tag_prefix] + break + else: + if None in self.yaml_multi_constructors: + tag_suffix = node.tag + constructor = self.yaml_multi_constructors[None] + elif None in self.yaml_constructors: + constructor = self.yaml_constructors[None] + elif isinstance(node, ScalarNode): + constructor = self.__class__.construct_scalar + elif isinstance(node, SequenceNode): + constructor = self.__class__.construct_sequence + elif isinstance(node, MappingNode): + constructor = self.__class__.construct_mapping + if tag_suffix is None: + data = constructor(self, node) + else: + data = constructor(self, tag_suffix, node) + if isinstance(data, types.GeneratorType): + generator = data + data = next(generator) + if self.deep_construct: + for dummy in generator: + pass + else: + self.state_generators.append(generator) + self.constructed_objects[node] = data + del 
self.recursive_objects[node] + if deep: + self.deep_construct = old_deep + return data + + def construct_scalar(self, node): + if not isinstance(node, ScalarNode): + raise ConstructorError(None, None, + "expected a scalar node, but found %s" % node.id, + node.start_mark) + return node.value + + def construct_sequence(self, node, deep=False): + if not isinstance(node, SequenceNode): + raise ConstructorError(None, None, + "expected a sequence node, but found %s" % node.id, + node.start_mark) + return [self.construct_object(child, deep=deep) + for child in node.value] + + def construct_mapping(self, node, deep=False): + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + mapping = {} + for key_node, value_node in node.value: + key = self.construct_object(key_node, deep=deep) + if not isinstance(key, collections.abc.Hashable): + raise ConstructorError("while constructing a mapping", node.start_mark, + "found unhashable key", key_node.start_mark) + value = self.construct_object(value_node, deep=deep) + mapping[key] = value + return mapping + + def construct_pairs(self, node, deep=False): + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + pairs = [] + for key_node, value_node in node.value: + key = self.construct_object(key_node, deep=deep) + value = self.construct_object(value_node, deep=deep) + pairs.append((key, value)) + return pairs + + @classmethod + def add_constructor(cls, tag, constructor): + if not 'yaml_constructors' in cls.__dict__: + cls.yaml_constructors = cls.yaml_constructors.copy() + cls.yaml_constructors[tag] = constructor + + @classmethod + def add_multi_constructor(cls, tag_prefix, multi_constructor): + if not 'yaml_multi_constructors' in cls.__dict__: + cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() + cls.yaml_multi_constructors[tag_prefix] = multi_constructor + +class SafeConstructor(BaseConstructor): + + def construct_scalar(self, node): + if isinstance(node, MappingNode): + for key_node, value_node in node.value: + if key_node.tag == 'tag:yaml.org,2002:value': + return self.construct_scalar(value_node) + return super().construct_scalar(node) + + def flatten_mapping(self, node): + merge = [] + index = 0 + while index < len(node.value): + key_node, value_node = node.value[index] + if key_node.tag == 'tag:yaml.org,2002:merge': + del node.value[index] + if isinstance(value_node, MappingNode): + self.flatten_mapping(value_node) + merge.extend(value_node.value) + elif isinstance(value_node, SequenceNode): + submerge = [] + for subnode in value_node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing a mapping", + node.start_mark, + "expected a mapping for merging, but found %s" + % subnode.id, subnode.start_mark) + self.flatten_mapping(subnode) + submerge.append(subnode.value) + submerge.reverse() + for value in submerge: + merge.extend(value) + else: + raise ConstructorError("while constructing a mapping", node.start_mark, + "expected a mapping or list of mappings for merging, but found %s" + % value_node.id, value_node.start_mark) + elif key_node.tag == 'tag:yaml.org,2002:value': + key_node.tag = 'tag:yaml.org,2002:str' + index += 1 + else: + index += 1 + if merge: + node.value = merge + node.value + + def construct_mapping(self, node, deep=False): + if isinstance(node, MappingNode): + self.flatten_mapping(node) + return 
super().construct_mapping(node, deep=deep)
+
+    def construct_yaml_null(self, node):
+        self.construct_scalar(node)
+        return None
+
+    bool_values = {
+        'yes': True,
+        'no': False,
+        'true': True,
+        'false': False,
+        'on': True,
+        'off': False,
+    }
+
+    def construct_yaml_bool(self, node):
+        value = self.construct_scalar(node)
+        return self.bool_values[value.lower()]
+
+    def construct_yaml_int(self, node):
+        value = self.construct_scalar(node)
+        value = value.replace('_', '')
+        sign = +1
+        if value[0] == '-':
+            sign = -1
+        if value[0] in '+-':
+            value = value[1:]
+        if value == '0':
+            return 0
+        elif value.startswith('0b'):
+            return sign*int(value[2:], 2)
+        elif value.startswith('0x'):
+            return sign*int(value[2:], 16)
+        elif value[0] == '0':
+            return sign*int(value, 8)
+        elif ':' in value:
+            digits = [int(part) for part in value.split(':')]
+            digits.reverse()
+            base = 1
+            value = 0
+            for digit in digits:
+                value += digit*base
+                base *= 60
+            return sign*value
+        else:
+            return sign*int(value)
+
+    inf_value = 1e300
+    while inf_value != inf_value*inf_value:
+        inf_value *= inf_value
+    nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
+
+    def construct_yaml_float(self, node):
+        value = self.construct_scalar(node)
+        value = value.replace('_', '').lower()
+        sign = +1
+        if value[0] == '-':
+            sign = -1
+        if value[0] in '+-':
+            value = value[1:]
+        if value == '.inf':
+            return sign*self.inf_value
+        elif value == '.nan':
+            return self.nan_value
+        elif ':' in value:
+            digits = [float(part) for part in value.split(':')]
+            digits.reverse()
+            base = 1
+            value = 0.0
+            for digit in digits:
+                value += digit*base
+                base *= 60
+            return sign*value
+        else:
+            return sign*float(value)
+
+    def construct_yaml_binary(self, node):
+        try:
+            value = self.construct_scalar(node).encode('ascii')
+        except UnicodeEncodeError as exc:
+            raise ConstructorError(None, None,
+                    "failed to convert base64 data into ascii: %s" % exc,
+                    node.start_mark)
+        try:
+            if hasattr(base64, 'decodebytes'):
+                return base64.decodebytes(value)
+            else:
+                return base64.decodestring(value)
+        except binascii.Error as exc:
+            raise ConstructorError(None, None,
+                    "failed to decode base64 data: %s" % exc, node.start_mark)
+
+    timestamp_regexp = re.compile(
+            r'''^(?P<year>[0-9][0-9][0-9][0-9])
+                -(?P<month>[0-9][0-9]?)
+                -(?P<day>[0-9][0-9]?)
+                (?:(?:[Tt]|[ \t]+)
+                (?P<hour>[0-9][0-9]?)
+                :(?P<minute>[0-9][0-9])
+                :(?P<second>[0-9][0-9])
+                (?:\.(?P<fraction>[0-9]*))?
+                (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
+                (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
+
+    def construct_yaml_timestamp(self, node):
+        value = self.construct_scalar(node)
+        match = self.timestamp_regexp.match(node.value)
+        values = match.groupdict()
+        year = int(values['year'])
+        month = int(values['month'])
+        day = int(values['day'])
+        if not values['hour']:
+            return datetime.date(year, month, day)
+        hour = int(values['hour'])
+        minute = int(values['minute'])
+        second = int(values['second'])
+        fraction = 0
+        tzinfo = None
+        if values['fraction']:
+            fraction = values['fraction'][:6]
+            while len(fraction) < 6:
+                fraction += '0'
+            fraction = int(fraction)
+        if values['tz_sign']:
+            tz_hour = int(values['tz_hour'])
+            tz_minute = int(values['tz_minute'] or 0)
+            delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
+            if values['tz_sign'] == '-':
+                delta = -delta
+            tzinfo = datetime.timezone(delta)
+        elif values['tz']:
+            tzinfo = datetime.timezone.utc
+        return datetime.datetime(year, month, day, hour, minute, second, fraction,
+                                 tzinfo=tzinfo)
+
+    def construct_yaml_omap(self, node):
+        # Note: we do not check for duplicate keys, because it's too
+        # CPU-expensive.
+        omap = []
+        yield omap
+        if not isinstance(node, SequenceNode):
+            raise ConstructorError("while constructing an ordered map", node.start_mark,
+                    "expected a sequence, but found %s" % node.id, node.start_mark)
+        for subnode in node.value:
+            if not isinstance(subnode, MappingNode):
+                raise ConstructorError("while constructing an ordered map", node.start_mark,
+                        "expected a mapping of length 1, but found %s" % subnode.id,
+                        subnode.start_mark)
+            if len(subnode.value) != 1:
+                raise ConstructorError("while constructing an ordered map", node.start_mark,
+                        "expected a single mapping item, but found %d items" % len(subnode.value),
+                        subnode.start_mark)
+            key_node, value_node = subnode.value[0]
+            key = self.construct_object(key_node)
+            value = self.construct_object(value_node)
+            omap.append((key, value))
+
+    def construct_yaml_pairs(self, node):
+        # Note: the same code as `construct_yaml_omap`.
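A quick illustration of how the SafeConstructor handlers above behave when driven through safe_load(); this is a hedged sketch, not part of the patched file, assuming the vendored copy is importable as shown:

    import datetime
    from invoke.vendor import yaml

    assert yaml.safe_load("count: 0x1A") == {"count": 26}                 # construct_yaml_int
    assert yaml.safe_load("!!omap [a: 1, b: 2]") == [("a", 1), ("b", 2)]  # construct_yaml_omap
    assert yaml.safe_load("when: 2001-12-14") == {"when": datetime.date(2001, 12, 14)}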
+ pairs = [] + yield pairs + if not isinstance(node, SequenceNode): + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a sequence, but found %s" % node.id, node.start_mark) + for subnode in node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a mapping of length 1, but found %s" % subnode.id, + subnode.start_mark) + if len(subnode.value) != 1: + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a single mapping item, but found %d items" % len(subnode.value), + subnode.start_mark) + key_node, value_node = subnode.value[0] + key = self.construct_object(key_node) + value = self.construct_object(value_node) + pairs.append((key, value)) + + def construct_yaml_set(self, node): + data = set() + yield data + value = self.construct_mapping(node) + data.update(value) + + def construct_yaml_str(self, node): + return self.construct_scalar(node) + + def construct_yaml_seq(self, node): + data = [] + yield data + data.extend(self.construct_sequence(node)) + + def construct_yaml_map(self, node): + data = {} + yield data + value = self.construct_mapping(node) + data.update(value) + + def construct_yaml_object(self, node, cls): + data = cls.__new__(cls) + yield data + if hasattr(data, '__setstate__'): + state = self.construct_mapping(node, deep=True) + data.__setstate__(state) + else: + state = self.construct_mapping(node) + data.__dict__.update(state) + + def construct_undefined(self, node): + raise ConstructorError(None, None, + "could not determine a constructor for the tag %r" % node.tag, + node.start_mark) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:null', + SafeConstructor.construct_yaml_null) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:bool', + SafeConstructor.construct_yaml_bool) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:int', + SafeConstructor.construct_yaml_int) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:float', + SafeConstructor.construct_yaml_float) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:binary', + SafeConstructor.construct_yaml_binary) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:timestamp', + SafeConstructor.construct_yaml_timestamp) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:omap', + SafeConstructor.construct_yaml_omap) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:pairs', + SafeConstructor.construct_yaml_pairs) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:set', + SafeConstructor.construct_yaml_set) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:str', + SafeConstructor.construct_yaml_str) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:seq', + SafeConstructor.construct_yaml_seq) + +SafeConstructor.add_constructor( + 'tag:yaml.org,2002:map', + SafeConstructor.construct_yaml_map) + +SafeConstructor.add_constructor(None, + SafeConstructor.construct_undefined) + +class FullConstructor(SafeConstructor): + # 'extend' is blacklisted because it is used by + # construct_python_object_apply to add `listitems` to a newly generate + # python instance + def get_state_keys_blacklist(self): + return ['^extend$', '^__.*__$'] + + def get_state_keys_blacklist_regexp(self): + if not hasattr(self, 'state_keys_blacklist_regexp'): + self.state_keys_blacklist_regexp = re.compile('(' + '|'.join(self.get_state_keys_blacklist()) + ')') + return self.state_keys_blacklist_regexp + + def construct_python_str(self, node): + return 
self.construct_scalar(node) + + def construct_python_unicode(self, node): + return self.construct_scalar(node) + + def construct_python_bytes(self, node): + try: + value = self.construct_scalar(node).encode('ascii') + except UnicodeEncodeError as exc: + raise ConstructorError(None, None, + "failed to convert base64 data into ascii: %s" % exc, + node.start_mark) + try: + if hasattr(base64, 'decodebytes'): + return base64.decodebytes(value) + else: + return base64.decodestring(value) + except binascii.Error as exc: + raise ConstructorError(None, None, + "failed to decode base64 data: %s" % exc, node.start_mark) + + def construct_python_long(self, node): + return self.construct_yaml_int(node) + + def construct_python_complex(self, node): + return complex(self.construct_scalar(node)) + + def construct_python_tuple(self, node): + return tuple(self.construct_sequence(node)) + + def find_python_module(self, name, mark, unsafe=False): + if not name: + raise ConstructorError("while constructing a Python module", mark, + "expected non-empty name appended to the tag", mark) + if unsafe: + try: + __import__(name) + except ImportError as exc: + raise ConstructorError("while constructing a Python module", mark, + "cannot find module %r (%s)" % (name, exc), mark) + if name not in sys.modules: + raise ConstructorError("while constructing a Python module", mark, + "module %r is not imported" % name, mark) + return sys.modules[name] + + def find_python_name(self, name, mark, unsafe=False): + if not name: + raise ConstructorError("while constructing a Python object", mark, + "expected non-empty name appended to the tag", mark) + if '.' in name: + module_name, object_name = name.rsplit('.', 1) + else: + module_name = 'builtins' + object_name = name + if unsafe: + try: + __import__(module_name) + except ImportError as exc: + raise ConstructorError("while constructing a Python object", mark, + "cannot find module %r (%s)" % (module_name, exc), mark) + if module_name not in sys.modules: + raise ConstructorError("while constructing a Python object", mark, + "module %r is not imported" % module_name, mark) + module = sys.modules[module_name] + if not hasattr(module, object_name): + raise ConstructorError("while constructing a Python object", mark, + "cannot find %r in the module %r" + % (object_name, module.__name__), mark) + return getattr(module, object_name) + + def construct_python_name(self, suffix, node): + value = self.construct_scalar(node) + if value: + raise ConstructorError("while constructing a Python name", node.start_mark, + "expected the empty value, but found %r" % value, node.start_mark) + return self.find_python_name(suffix, node.start_mark) + + def construct_python_module(self, suffix, node): + value = self.construct_scalar(node) + if value: + raise ConstructorError("while constructing a Python module", node.start_mark, + "expected the empty value, but found %r" % value, node.start_mark) + return self.find_python_module(suffix, node.start_mark) + + def make_python_instance(self, suffix, node, + args=None, kwds=None, newobj=False, unsafe=False): + if not args: + args = [] + if not kwds: + kwds = {} + cls = self.find_python_name(suffix, node.start_mark) + if not (unsafe or isinstance(cls, type)): + raise ConstructorError("while constructing a Python instance", node.start_mark, + "expected a class, but found %r" % type(cls), + node.start_mark) + if newobj and isinstance(cls, type): + return cls.__new__(cls, *args, **kwds) + else: + return cls(*args, **kwds) + + def set_python_instance_state(self, 
instance, state, unsafe=False): + if hasattr(instance, '__setstate__'): + instance.__setstate__(state) + else: + slotstate = {} + if isinstance(state, tuple) and len(state) == 2: + state, slotstate = state + if hasattr(instance, '__dict__'): + if not unsafe and state: + for key in state.keys(): + self.check_state_key(key) + instance.__dict__.update(state) + elif state: + slotstate.update(state) + for key, value in slotstate.items(): + if not unsafe: + self.check_state_key(key) + setattr(instance, key, value) + + def construct_python_object(self, suffix, node): + # Format: + # !!python/object:module.name { ... state ... } + instance = self.make_python_instance(suffix, node, newobj=True) + yield instance + deep = hasattr(instance, '__setstate__') + state = self.construct_mapping(node, deep=deep) + self.set_python_instance_state(instance, state) + + def construct_python_object_apply(self, suffix, node, newobj=False): + # Format: + # !!python/object/apply # (or !!python/object/new) + # args: [ ... arguments ... ] + # kwds: { ... keywords ... } + # state: ... state ... + # listitems: [ ... listitems ... ] + # dictitems: { ... dictitems ... } + # or short format: + # !!python/object/apply [ ... arguments ... ] + # The difference between !!python/object/apply and !!python/object/new + # is how an object is created, check make_python_instance for details. + if isinstance(node, SequenceNode): + args = self.construct_sequence(node, deep=True) + kwds = {} + state = {} + listitems = [] + dictitems = {} + else: + value = self.construct_mapping(node, deep=True) + args = value.get('args', []) + kwds = value.get('kwds', {}) + state = value.get('state', {}) + listitems = value.get('listitems', []) + dictitems = value.get('dictitems', {}) + instance = self.make_python_instance(suffix, node, args, kwds, newobj) + if state: + self.set_python_instance_state(instance, state) + if listitems: + instance.extend(listitems) + if dictitems: + for key in dictitems: + instance[key] = dictitems[key] + return instance + + def construct_python_object_new(self, suffix, node): + return self.construct_python_object_apply(suffix, node, newobj=True) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/none', + FullConstructor.construct_yaml_null) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/bool', + FullConstructor.construct_yaml_bool) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/str', + FullConstructor.construct_python_str) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/unicode', + FullConstructor.construct_python_unicode) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/bytes', + FullConstructor.construct_python_bytes) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/int', + FullConstructor.construct_yaml_int) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/long', + FullConstructor.construct_python_long) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/float', + FullConstructor.construct_yaml_float) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/complex', + FullConstructor.construct_python_complex) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/list', + FullConstructor.construct_yaml_seq) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/tuple', + FullConstructor.construct_python_tuple) + +FullConstructor.add_constructor( + 'tag:yaml.org,2002:python/dict', + FullConstructor.construct_yaml_map) + +FullConstructor.add_multi_constructor( + 
'tag:yaml.org,2002:python/name:', + FullConstructor.construct_python_name) + +class UnsafeConstructor(FullConstructor): + + def find_python_module(self, name, mark): + return super(UnsafeConstructor, self).find_python_module(name, mark, unsafe=True) + + def find_python_name(self, name, mark): + return super(UnsafeConstructor, self).find_python_name(name, mark, unsafe=True) + + def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False): + return super(UnsafeConstructor, self).make_python_instance( + suffix, node, args, kwds, newobj, unsafe=True) + + def set_python_instance_state(self, instance, state): + return super(UnsafeConstructor, self).set_python_instance_state( + instance, state, unsafe=True) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/module:', + UnsafeConstructor.construct_python_module) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object:', + UnsafeConstructor.construct_python_object) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object/new:', + UnsafeConstructor.construct_python_object_new) + +UnsafeConstructor.add_multi_constructor( + 'tag:yaml.org,2002:python/object/apply:', + UnsafeConstructor.construct_python_object_apply) + +# Constructor is same as UnsafeConstructor. Need to leave this in place in case +# people have extended it directly. +class Constructor(UnsafeConstructor): + pass diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/cyaml.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/cyaml.py new file mode 100644 index 0000000..0c21345 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/cyaml.py @@ -0,0 +1,101 @@ + +__all__ = [ + 'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader', + 'CBaseDumper', 'CSafeDumper', 'CDumper' +] + +from yaml._yaml import CParser, CEmitter + +from .constructor import * + +from .serializer import * +from .representer import * + +from .resolver import * + +class CBaseLoader(CParser, BaseConstructor, BaseResolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + BaseConstructor.__init__(self) + BaseResolver.__init__(self) + +class CSafeLoader(CParser, SafeConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + SafeConstructor.__init__(self) + Resolver.__init__(self) + +class CFullLoader(CParser, FullConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + FullConstructor.__init__(self) + Resolver.__init__(self) + +class CUnsafeLoader(CParser, UnsafeConstructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + UnsafeConstructor.__init__(self) + Resolver.__init__(self) + +class CLoader(CParser, Constructor, Resolver): + + def __init__(self, stream): + CParser.__init__(self, stream) + Constructor.__init__(self) + Resolver.__init__(self) + +class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + 
default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class CSafeDumper(CEmitter, SafeRepresenter, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + SafeRepresenter.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class CDumper(CEmitter, Serializer, Representer, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + CEmitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, encoding=encoding, + allow_unicode=allow_unicode, line_break=line_break, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/dumper.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/dumper.py new file mode 100644 index 0000000..6aadba5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/dumper.py @@ -0,0 +1,62 @@ + +__all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] + +from .emitter import * +from .serializer import * +from .representer import * +from .resolver import * + +class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + SafeRepresenter.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + +class Dumper(Emitter, Serializer, Representer, Resolver): + + def 
__init__(self, stream, + default_style=None, default_flow_style=False, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None, sort_keys=True): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style, sort_keys=sort_keys) + Resolver.__init__(self) + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/emitter.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/emitter.py new file mode 100644 index 0000000..a664d01 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/emitter.py @@ -0,0 +1,1137 @@ + +# Emitter expects events obeying the following grammar: +# stream ::= STREAM-START document* STREAM-END +# document ::= DOCUMENT-START node DOCUMENT-END +# node ::= SCALAR | sequence | mapping +# sequence ::= SEQUENCE-START node* SEQUENCE-END +# mapping ::= MAPPING-START (node node)* MAPPING-END + +__all__ = ['Emitter', 'EmitterError'] + +from .error import YAMLError +from .events import * + +class EmitterError(YAMLError): + pass + +class ScalarAnalysis: + def __init__(self, scalar, empty, multiline, + allow_flow_plain, allow_block_plain, + allow_single_quoted, allow_double_quoted, + allow_block): + self.scalar = scalar + self.empty = empty + self.multiline = multiline + self.allow_flow_plain = allow_flow_plain + self.allow_block_plain = allow_block_plain + self.allow_single_quoted = allow_single_quoted + self.allow_double_quoted = allow_double_quoted + self.allow_block = allow_block + +class Emitter: + + DEFAULT_TAG_PREFIXES = { + '!' : '!', + 'tag:yaml.org,2002:' : '!!', + } + + def __init__(self, stream, canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None): + + # The stream should have the methods `write` and possibly `flush`. + self.stream = stream + + # Encoding can be overridden by STREAM-START. + self.encoding = None + + # Emitter is a state machine with a stack of states to handle nested + # structures. + self.states = [] + self.state = self.expect_stream_start + + # Current event and the event queue. + self.events = [] + self.event = None + + # The current indentation level and the stack of previous indents. + self.indents = [] + self.indent = None + + # Flow level. + self.flow_level = 0 + + # Contexts. + self.root_context = False + self.sequence_context = False + self.mapping_context = False + self.simple_key_context = False + + # Characteristics of the last emitted character: + # - current position. + # - is it a whitespace? + # - is it an indention character + # (indentation space, '-', '?', or ':')? + self.line = 0 + self.column = 0 + self.whitespace = True + self.indention = True + + # Whether the document requires an explicit document indicator + self.open_ended = False + + # Formatting details. + self.canonical = canonical + self.allow_unicode = allow_unicode + self.best_indent = 2 + if indent and 1 < indent < 10: + self.best_indent = indent + self.best_width = 80 + if width and width > self.best_indent*2: + self.best_width = width + self.best_line_break = '\n' + if line_break in ['\r', '\n', '\r\n']: + self.best_line_break = line_break + + # Tag prefixes. 
+ self.tag_prefixes = None + + # Prepared anchor and tag. + self.prepared_anchor = None + self.prepared_tag = None + + # Scalar analysis and style. + self.analysis = None + self.style = None + + def dispose(self): + # Reset the state attributes (to clear self-references) + self.states = [] + self.state = None + + def emit(self, event): + self.events.append(event) + while not self.need_more_events(): + self.event = self.events.pop(0) + self.state() + self.event = None + + # In some cases, we wait for a few next events before emitting. + + def need_more_events(self): + if not self.events: + return True + event = self.events[0] + if isinstance(event, DocumentStartEvent): + return self.need_events(1) + elif isinstance(event, SequenceStartEvent): + return self.need_events(2) + elif isinstance(event, MappingStartEvent): + return self.need_events(3) + else: + return False + + def need_events(self, count): + level = 0 + for event in self.events[1:]: + if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): + level += 1 + elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): + level -= 1 + elif isinstance(event, StreamEndEvent): + level = -1 + if level < 0: + return False + return (len(self.events) < count+1) + + def increase_indent(self, flow=False, indentless=False): + self.indents.append(self.indent) + if self.indent is None: + if flow: + self.indent = self.best_indent + else: + self.indent = 0 + elif not indentless: + self.indent += self.best_indent + + # States. + + # Stream handlers. + + def expect_stream_start(self): + if isinstance(self.event, StreamStartEvent): + if self.event.encoding and not hasattr(self.stream, 'encoding'): + self.encoding = self.event.encoding + self.write_stream_start() + self.state = self.expect_first_document_start + else: + raise EmitterError("expected StreamStartEvent, but got %s" + % self.event) + + def expect_nothing(self): + raise EmitterError("expected nothing, but got %s" % self.event) + + # Document handlers. 
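
The document handlers that follow decide when the explicit '---' and '...' markers are written. For reference, a minimal sketch of how that surfaces through the public yaml API (this uses the top-level yaml package rather than the vendored invoke.vendor.yaml copy, and the sample data is illustrative):

import yaml

docs = [{'name': 'first'}, {'name': 'second'}]

# dump_all() emits one document per item; explicit_start forces a leading
# '---' before every document so the multi-document stream stays unambiguous.
text = yaml.dump_all(docs, explicit_start=True)
print(text)
# ---
# name: first
# ---
# name: second

# load_all() walks the multi-document stream back into Python objects.
print(list(yaml.safe_load_all(text)))
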
+ + def expect_first_document_start(self): + return self.expect_document_start(first=True) + + def expect_document_start(self, first=False): + if isinstance(self.event, DocumentStartEvent): + if (self.event.version or self.event.tags) and self.open_ended: + self.write_indicator('...', True) + self.write_indent() + if self.event.version: + version_text = self.prepare_version(self.event.version) + self.write_version_directive(version_text) + self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() + if self.event.tags: + handles = sorted(self.event.tags.keys()) + for handle in handles: + prefix = self.event.tags[handle] + self.tag_prefixes[prefix] = handle + handle_text = self.prepare_tag_handle(handle) + prefix_text = self.prepare_tag_prefix(prefix) + self.write_tag_directive(handle_text, prefix_text) + implicit = (first and not self.event.explicit and not self.canonical + and not self.event.version and not self.event.tags + and not self.check_empty_document()) + if not implicit: + self.write_indent() + self.write_indicator('---', True) + if self.canonical: + self.write_indent() + self.state = self.expect_document_root + elif isinstance(self.event, StreamEndEvent): + if self.open_ended: + self.write_indicator('...', True) + self.write_indent() + self.write_stream_end() + self.state = self.expect_nothing + else: + raise EmitterError("expected DocumentStartEvent, but got %s" + % self.event) + + def expect_document_end(self): + if isinstance(self.event, DocumentEndEvent): + self.write_indent() + if self.event.explicit: + self.write_indicator('...', True) + self.write_indent() + self.flush_stream() + self.state = self.expect_document_start + else: + raise EmitterError("expected DocumentEndEvent, but got %s" + % self.event) + + def expect_document_root(self): + self.states.append(self.expect_document_end) + self.expect_node(root=True) + + # Node handlers. + + def expect_node(self, root=False, sequence=False, mapping=False, + simple_key=False): + self.root_context = root + self.sequence_context = sequence + self.mapping_context = mapping + self.simple_key_context = simple_key + if isinstance(self.event, AliasEvent): + self.expect_alias() + elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): + self.process_anchor('&') + self.process_tag() + if isinstance(self.event, ScalarEvent): + self.expect_scalar() + elif isinstance(self.event, SequenceStartEvent): + if self.flow_level or self.canonical or self.event.flow_style \ + or self.check_empty_sequence(): + self.expect_flow_sequence() + else: + self.expect_block_sequence() + elif isinstance(self.event, MappingStartEvent): + if self.flow_level or self.canonical or self.event.flow_style \ + or self.check_empty_mapping(): + self.expect_flow_mapping() + else: + self.expect_block_mapping() + else: + raise EmitterError("expected NodeEvent, but got %s" % self.event) + + def expect_alias(self): + if self.event.anchor is None: + raise EmitterError("anchor is not specified for alias") + self.process_anchor('*') + self.state = self.states.pop() + + def expect_scalar(self): + self.increase_indent(flow=True) + self.process_scalar() + self.indent = self.indents.pop() + self.state = self.states.pop() + + # Flow sequence handlers. 
+ + def expect_flow_sequence(self): + self.write_indicator('[', True, whitespace=True) + self.flow_level += 1 + self.increase_indent(flow=True) + self.state = self.expect_first_flow_sequence_item + + def expect_first_flow_sequence_item(self): + if isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + self.write_indicator(']', False) + self.state = self.states.pop() + else: + if self.canonical or self.column > self.best_width: + self.write_indent() + self.states.append(self.expect_flow_sequence_item) + self.expect_node(sequence=True) + + def expect_flow_sequence_item(self): + if isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + if self.canonical: + self.write_indicator(',', False) + self.write_indent() + self.write_indicator(']', False) + self.state = self.states.pop() + else: + self.write_indicator(',', False) + if self.canonical or self.column > self.best_width: + self.write_indent() + self.states.append(self.expect_flow_sequence_item) + self.expect_node(sequence=True) + + # Flow mapping handlers. + + def expect_flow_mapping(self): + self.write_indicator('{', True, whitespace=True) + self.flow_level += 1 + self.increase_indent(flow=True) + self.state = self.expect_first_flow_mapping_key + + def expect_first_flow_mapping_key(self): + if isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + self.write_indicator('}', False) + self.state = self.states.pop() + else: + if self.canonical or self.column > self.best_width: + self.write_indent() + if not self.canonical and self.check_simple_key(): + self.states.append(self.expect_flow_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True) + self.states.append(self.expect_flow_mapping_value) + self.expect_node(mapping=True) + + def expect_flow_mapping_key(self): + if isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + if self.canonical: + self.write_indicator(',', False) + self.write_indent() + self.write_indicator('}', False) + self.state = self.states.pop() + else: + self.write_indicator(',', False) + if self.canonical or self.column > self.best_width: + self.write_indent() + if not self.canonical and self.check_simple_key(): + self.states.append(self.expect_flow_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True) + self.states.append(self.expect_flow_mapping_value) + self.expect_node(mapping=True) + + def expect_flow_mapping_simple_value(self): + self.write_indicator(':', False) + self.states.append(self.expect_flow_mapping_key) + self.expect_node(mapping=True) + + def expect_flow_mapping_value(self): + if self.canonical or self.column > self.best_width: + self.write_indent() + self.write_indicator(':', True) + self.states.append(self.expect_flow_mapping_key) + self.expect_node(mapping=True) + + # Block sequence handlers. 
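
Whether the emitter takes the flow-collection states above or the block-collection states below is driven by the flow_style flag on the start events, which yaml.dump exposes as default_flow_style. A small sketch of the two outputs, using the public yaml API with illustrative data:

import yaml

data = {'servers': ['alpha', 'beta']}

# Block style: one '- ' entry per line, handled by the block sequence states.
print(yaml.dump(data, default_flow_style=False))
# servers:
# - alpha
# - beta

# Flow style: bracketed inline form, handled by the flow sequence/mapping states.
print(yaml.dump(data, default_flow_style=True))
# {servers: [alpha, beta]}
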
+ + def expect_block_sequence(self): + indentless = (self.mapping_context and not self.indention) + self.increase_indent(flow=False, indentless=indentless) + self.state = self.expect_first_block_sequence_item + + def expect_first_block_sequence_item(self): + return self.expect_block_sequence_item(first=True) + + def expect_block_sequence_item(self, first=False): + if not first and isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.state = self.states.pop() + else: + self.write_indent() + self.write_indicator('-', True, indention=True) + self.states.append(self.expect_block_sequence_item) + self.expect_node(sequence=True) + + # Block mapping handlers. + + def expect_block_mapping(self): + self.increase_indent(flow=False) + self.state = self.expect_first_block_mapping_key + + def expect_first_block_mapping_key(self): + return self.expect_block_mapping_key(first=True) + + def expect_block_mapping_key(self, first=False): + if not first and isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.state = self.states.pop() + else: + self.write_indent() + if self.check_simple_key(): + self.states.append(self.expect_block_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator('?', True, indention=True) + self.states.append(self.expect_block_mapping_value) + self.expect_node(mapping=True) + + def expect_block_mapping_simple_value(self): + self.write_indicator(':', False) + self.states.append(self.expect_block_mapping_key) + self.expect_node(mapping=True) + + def expect_block_mapping_value(self): + self.write_indent() + self.write_indicator(':', True, indention=True) + self.states.append(self.expect_block_mapping_key) + self.expect_node(mapping=True) + + # Checkers. + + def check_empty_sequence(self): + return (isinstance(self.event, SequenceStartEvent) and self.events + and isinstance(self.events[0], SequenceEndEvent)) + + def check_empty_mapping(self): + return (isinstance(self.event, MappingStartEvent) and self.events + and isinstance(self.events[0], MappingEndEvent)) + + def check_empty_document(self): + if not isinstance(self.event, DocumentStartEvent) or not self.events: + return False + event = self.events[0] + return (isinstance(event, ScalarEvent) and event.anchor is None + and event.tag is None and event.implicit and event.value == '') + + def check_simple_key(self): + length = 0 + if isinstance(self.event, NodeEvent) and self.event.anchor is not None: + if self.prepared_anchor is None: + self.prepared_anchor = self.prepare_anchor(self.event.anchor) + length += len(self.prepared_anchor) + if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ + and self.event.tag is not None: + if self.prepared_tag is None: + self.prepared_tag = self.prepare_tag(self.event.tag) + length += len(self.prepared_tag) + if isinstance(self.event, ScalarEvent): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + length += len(self.analysis.scalar) + return (length < 128 and (isinstance(self.event, AliasEvent) + or (isinstance(self.event, ScalarEvent) + and not self.analysis.empty and not self.analysis.multiline) + or self.check_empty_sequence() or self.check_empty_mapping())) + + # Anchor, Tag, and Scalar processors. 
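
The anchor and tag processors below only write what the representer attached to the events; in practice anchors and aliases appear when the same object occurs more than once in the dumped data. A hedged sketch of the observable behaviour via the public yaml API (output shown approximately):

import yaml

shared = ['reuse', 'me']
text = yaml.dump({'a': shared, 'b': shared})
print(text)
# The second occurrence of the shared list is emitted as an alias, roughly:
# a: &id001
# - reuse
# - me
# b: *id001

# Loading resolves the alias back to a single shared object.
loaded = yaml.safe_load(text)
assert loaded['a'] is loaded['b']
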
+ + def process_anchor(self, indicator): + if self.event.anchor is None: + self.prepared_anchor = None + return + if self.prepared_anchor is None: + self.prepared_anchor = self.prepare_anchor(self.event.anchor) + if self.prepared_anchor: + self.write_indicator(indicator+self.prepared_anchor, True) + self.prepared_anchor = None + + def process_tag(self): + tag = self.event.tag + if isinstance(self.event, ScalarEvent): + if self.style is None: + self.style = self.choose_scalar_style() + if ((not self.canonical or tag is None) and + ((self.style == '' and self.event.implicit[0]) + or (self.style != '' and self.event.implicit[1]))): + self.prepared_tag = None + return + if self.event.implicit[0] and tag is None: + tag = '!' + self.prepared_tag = None + else: + if (not self.canonical or tag is None) and self.event.implicit: + self.prepared_tag = None + return + if tag is None: + raise EmitterError("tag is not specified") + if self.prepared_tag is None: + self.prepared_tag = self.prepare_tag(tag) + if self.prepared_tag: + self.write_indicator(self.prepared_tag, True) + self.prepared_tag = None + + def choose_scalar_style(self): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + if self.event.style == '"' or self.canonical: + return '"' + if not self.event.style and self.event.implicit[0]: + if (not (self.simple_key_context and + (self.analysis.empty or self.analysis.multiline)) + and (self.flow_level and self.analysis.allow_flow_plain + or (not self.flow_level and self.analysis.allow_block_plain))): + return '' + if self.event.style and self.event.style in '|>': + if (not self.flow_level and not self.simple_key_context + and self.analysis.allow_block): + return self.event.style + if not self.event.style or self.event.style == '\'': + if (self.analysis.allow_single_quoted and + not (self.simple_key_context and self.analysis.multiline)): + return '\'' + return '"' + + def process_scalar(self): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + if self.style is None: + self.style = self.choose_scalar_style() + split = (not self.simple_key_context) + #if self.analysis.multiline and split \ + # and (not self.style or self.style in '\'\"'): + # self.write_indent() + if self.style == '"': + self.write_double_quoted(self.analysis.scalar, split) + elif self.style == '\'': + self.write_single_quoted(self.analysis.scalar, split) + elif self.style == '>': + self.write_folded(self.analysis.scalar) + elif self.style == '|': + self.write_literal(self.analysis.scalar) + else: + self.write_plain(self.analysis.scalar, split) + self.analysis = None + self.style = None + + # Analyzers. + + def prepare_version(self, version): + major, minor = version + if major != 1: + raise EmitterError("unsupported YAML version: %d.%d" % (major, minor)) + return '%d.%d' % (major, minor) + + def prepare_tag_handle(self, handle): + if not handle: + raise EmitterError("tag handle must not be empty") + if handle[0] != '!' 
or handle[-1] != '!': + raise EmitterError("tag handle must start and end with '!': %r" % handle) + for ch in handle[1:-1]: + if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_'): + raise EmitterError("invalid character %r in the tag handle: %r" + % (ch, handle)) + return handle + + def prepare_tag_prefix(self, prefix): + if not prefix: + raise EmitterError("tag prefix must not be empty") + chunks = [] + start = end = 0 + if prefix[0] == '!': + end = 1 + while end < len(prefix): + ch = prefix[end] + if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-;/?!:@&=+$,_.~*\'()[]': + end += 1 + else: + if start < end: + chunks.append(prefix[start:end]) + start = end = end+1 + data = ch.encode('utf-8') + for ch in data: + chunks.append('%%%02X' % ord(ch)) + if start < end: + chunks.append(prefix[start:end]) + return ''.join(chunks) + + def prepare_tag(self, tag): + if not tag: + raise EmitterError("tag must not be empty") + if tag == '!': + return tag + handle = None + suffix = tag + prefixes = sorted(self.tag_prefixes.keys()) + for prefix in prefixes: + if tag.startswith(prefix) \ + and (prefix == '!' or len(prefix) < len(tag)): + handle = self.tag_prefixes[prefix] + suffix = tag[len(prefix):] + chunks = [] + start = end = 0 + while end < len(suffix): + ch = suffix[end] + if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-;/?:@&=+$,_.~*\'()[]' \ + or (ch == '!' and handle != '!'): + end += 1 + else: + if start < end: + chunks.append(suffix[start:end]) + start = end = end+1 + data = ch.encode('utf-8') + for ch in data: + chunks.append('%%%02X' % ch) + if start < end: + chunks.append(suffix[start:end]) + suffix_text = ''.join(chunks) + if handle: + return '%s%s' % (handle, suffix_text) + else: + return '!<%s>' % suffix_text + + def prepare_anchor(self, anchor): + if not anchor: + raise EmitterError("anchor must not be empty") + for ch in anchor: + if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_'): + raise EmitterError("invalid character %r in the anchor: %r" + % (ch, anchor)) + return anchor + + def analyze_scalar(self, scalar): + + # Empty scalar is a special case. + if not scalar: + return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, + allow_flow_plain=False, allow_block_plain=True, + allow_single_quoted=True, allow_double_quoted=True, + allow_block=False) + + # Indicators and special characters. + block_indicators = False + flow_indicators = False + line_breaks = False + special_characters = False + + # Important whitespace combinations. + leading_space = False + leading_break = False + trailing_space = False + trailing_break = False + break_space = False + space_break = False + + # Check document indicators. + if scalar.startswith('---') or scalar.startswith('...'): + block_indicators = True + flow_indicators = True + + # First character or preceded by a whitespace. + preceded_by_whitespace = True + + # Last character or followed by a whitespace. + followed_by_whitespace = (len(scalar) == 1 or + scalar[1] in '\0 \t\r\n\x85\u2028\u2029') + + # The previous character is a space. + previous_space = False + + # The previous character is a break. + previous_break = False + + index = 0 + while index < len(scalar): + ch = scalar[index] + + # Check for indicators. + if index == 0: + # Leading indicators are special characters. 
+ if ch in '#,[]{}&*!|>\'\"%@`': + flow_indicators = True + block_indicators = True + if ch in '?:': + flow_indicators = True + if followed_by_whitespace: + block_indicators = True + if ch == '-' and followed_by_whitespace: + flow_indicators = True + block_indicators = True + else: + # Some indicators cannot appear within a scalar as well. + if ch in ',?[]{}': + flow_indicators = True + if ch == ':': + flow_indicators = True + if followed_by_whitespace: + block_indicators = True + if ch == '#' and preceded_by_whitespace: + flow_indicators = True + block_indicators = True + + # Check for line breaks, special, and unicode characters. + if ch in '\n\x85\u2028\u2029': + line_breaks = True + if not (ch == '\n' or '\x20' <= ch <= '\x7E'): + if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF' + or '\uE000' <= ch <= '\uFFFD' + or '\U00010000' <= ch < '\U0010ffff') and ch != '\uFEFF': + unicode_characters = True + if not self.allow_unicode: + special_characters = True + else: + special_characters = True + + # Detect important whitespace combinations. + if ch == ' ': + if index == 0: + leading_space = True + if index == len(scalar)-1: + trailing_space = True + if previous_break: + break_space = True + previous_space = True + previous_break = False + elif ch in '\n\x85\u2028\u2029': + if index == 0: + leading_break = True + if index == len(scalar)-1: + trailing_break = True + if previous_space: + space_break = True + previous_space = False + previous_break = True + else: + previous_space = False + previous_break = False + + # Prepare for the next character. + index += 1 + preceded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029') + followed_by_whitespace = (index+1 >= len(scalar) or + scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029') + + # Let's decide what styles are allowed. + allow_flow_plain = True + allow_block_plain = True + allow_single_quoted = True + allow_double_quoted = True + allow_block = True + + # Leading and trailing whitespaces are bad for plain scalars. + if (leading_space or leading_break + or trailing_space or trailing_break): + allow_flow_plain = allow_block_plain = False + + # We do not permit trailing spaces for block scalars. + if trailing_space: + allow_block = False + + # Spaces at the beginning of a new line are only acceptable for block + # scalars. + if break_space: + allow_flow_plain = allow_block_plain = allow_single_quoted = False + + # Spaces followed by breaks, as well as special character are only + # allowed for double quoted scalars. + if space_break or special_characters: + allow_flow_plain = allow_block_plain = \ + allow_single_quoted = allow_block = False + + # Although the plain scalar writer supports breaks, we never emit + # multiline plain scalars. + if line_breaks: + allow_flow_plain = allow_block_plain = False + + # Flow indicators are forbidden for flow plain scalars. + if flow_indicators: + allow_flow_plain = False + + # Block indicators are forbidden for block plain scalars. + if block_indicators: + allow_block_plain = False + + return ScalarAnalysis(scalar=scalar, + empty=False, multiline=line_breaks, + allow_flow_plain=allow_flow_plain, + allow_block_plain=allow_block_plain, + allow_single_quoted=allow_single_quoted, + allow_double_quoted=allow_double_quoted, + allow_block=allow_block) + + # Writers. + + def flush_stream(self): + if hasattr(self.stream, 'flush'): + self.stream.flush() + + def write_stream_start(self): + # Write BOM if needed. 
+ if self.encoding and self.encoding.startswith('utf-16'): + self.stream.write('\uFEFF'.encode(self.encoding)) + + def write_stream_end(self): + self.flush_stream() + + def write_indicator(self, indicator, need_whitespace, + whitespace=False, indention=False): + if self.whitespace or not need_whitespace: + data = indicator + else: + data = ' '+indicator + self.whitespace = whitespace + self.indention = self.indention and indention + self.column += len(data) + self.open_ended = False + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_indent(self): + indent = self.indent or 0 + if not self.indention or self.column > indent \ + or (self.column == indent and not self.whitespace): + self.write_line_break() + if self.column < indent: + self.whitespace = True + data = ' '*(indent-self.column) + self.column = indent + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_line_break(self, data=None): + if data is None: + data = self.best_line_break + self.whitespace = True + self.indention = True + self.line += 1 + self.column = 0 + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_version_directive(self, version_text): + data = '%%YAML %s' % version_text + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_line_break() + + def write_tag_directive(self, handle_text, prefix_text): + data = '%%TAG %s %s' % (handle_text, prefix_text) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_line_break() + + # Scalar streams. + + def write_single_quoted(self, text, split=True): + self.write_indicator('\'', True) + spaces = False + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if spaces: + if ch is None or ch != ' ': + if start+1 == end and self.column > self.best_width and split \ + and start != 0 and end != len(text): + self.write_indent() + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + elif breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + if text[start] == '\n': + self.write_line_break() + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + self.write_indent() + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'': + if start < end: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch == '\'': + data = '\'\'' + self.column += 2 + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + 1 + if ch is not None: + spaces = (ch == ' ') + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 + self.write_indicator('\'', False) + + ESCAPE_REPLACEMENTS = { + '\0': '0', + '\x07': 'a', + '\x08': 'b', + '\x09': 't', + '\x0A': 'n', + '\x0B': 'v', + '\x0C': 'f', + '\x0D': 'r', + '\x1B': 'e', + '\"': '\"', + '\\': '\\', + '\x85': 'N', + '\xA0': '_', + '\u2028': 'L', + '\u2029': 'P', + } + + def write_double_quoted(self, text, split=True): + self.write_indicator('"', True) + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \ + or not ('\x20' <= ch <= '\x7E' + or (self.allow_unicode + and ('\xA0' <= ch <= '\uD7FF' + 
or '\uE000' <= ch <= '\uFFFD'))): + if start < end: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch is not None: + if ch in self.ESCAPE_REPLACEMENTS: + data = '\\'+self.ESCAPE_REPLACEMENTS[ch] + elif ch <= '\xFF': + data = '\\x%02X' % ord(ch) + elif ch <= '\uFFFF': + data = '\\u%04X' % ord(ch) + else: + data = '\\U%08X' % ord(ch) + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end+1 + if 0 < end < len(text)-1 and (ch == ' ' or start >= end) \ + and self.column+(end-start) > self.best_width and split: + data = text[start:end]+'\\' + if start < end: + start = end + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_indent() + self.whitespace = False + self.indention = False + if text[start] == ' ': + data = '\\' + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + end += 1 + self.write_indicator('"', False) + + def determine_block_hints(self, text): + hints = '' + if text: + if text[0] in ' \n\x85\u2028\u2029': + hints += str(self.best_indent) + if text[-1] not in '\n\x85\u2028\u2029': + hints += '-' + elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029': + hints += '+' + return hints + + def write_folded(self, text): + hints = self.determine_block_hints(text) + self.write_indicator('>'+hints, True) + if hints[-1:] == '+': + self.open_ended = True + self.write_line_break() + leading_space = True + spaces = False + breaks = True + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + if not leading_space and ch is not None and ch != ' ' \ + and text[start] == '\n': + self.write_line_break() + leading_space = (ch == ' ') + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + if ch is not None: + self.write_indent() + start = end + elif spaces: + if ch != ' ': + if start+1 == end and self.column > self.best_width: + self.write_indent() + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029': + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + if ch is None: + self.write_line_break() + start = end + if ch is not None: + breaks = (ch in '\n\x85\u2028\u2029') + spaces = (ch == ' ') + end += 1 + + def write_literal(self, text): + hints = self.determine_block_hints(text) + self.write_indicator('|'+hints, True) + if hints[-1:] == '+': + self.open_ended = True + self.write_line_break() + breaks = True + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if breaks: + if ch is None or ch not in '\n\x85\u2028\u2029': + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + if ch is not None: + self.write_indent() + start = end + else: + if ch is None or ch in '\n\x85\u2028\u2029': + data = text[start:end] + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + if ch is None: + self.write_line_break() + start = end + if ch is not None: + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 + 
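
The write_single_quoted/write_double_quoted/write_folded/write_literal/write_plain writers correspond to the five scalar styles the analyzer can allow. One common way to steer the choice is to attach a style in a custom representer; a minimal sketch using the public yaml API (str_representer is an illustrative name, not part of the library):

import yaml

def str_representer(dumper, data):
    # Request literal block style ('|') for multi-line strings so the emitter
    # uses write_literal(); single-line strings keep the default style choice.
    style = '|' if '\n' in data else None
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style=style)

yaml.add_representer(str, str_representer)

print(yaml.dump({'msg': 'first line\nsecond line\n'}))
# msg: |
#   first line
#   second line
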
+ def write_plain(self, text, split=True): + if self.root_context: + self.open_ended = True + if not text: + return + if not self.whitespace: + data = ' ' + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.whitespace = False + self.indention = False + spaces = False + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if spaces: + if ch != ' ': + if start+1 == end and self.column > self.best_width and split: + self.write_indent() + self.whitespace = False + self.indention = False + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + elif breaks: + if ch not in '\n\x85\u2028\u2029': + if text[start] == '\n': + self.write_line_break() + for br in text[start:end]: + if br == '\n': + self.write_line_break() + else: + self.write_line_break(br) + self.write_indent() + self.whitespace = False + self.indention = False + start = end + else: + if ch is None or ch in ' \n\x85\u2028\u2029': + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch is not None: + spaces = (ch == ' ') + breaks = (ch in '\n\x85\u2028\u2029') + end += 1 diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/error.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/error.py new file mode 100644 index 0000000..b796b4d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/error.py @@ -0,0 +1,75 @@ + +__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] + +class Mark: + + def __init__(self, name, index, line, column, buffer, pointer): + self.name = name + self.index = index + self.line = line + self.column = column + self.buffer = buffer + self.pointer = pointer + + def get_snippet(self, indent=4, max_length=75): + if self.buffer is None: + return None + head = '' + start = self.pointer + while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029': + start -= 1 + if self.pointer-start > max_length/2-1: + head = ' ... ' + start += 5 + break + tail = '' + end = self.pointer + while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029': + end += 1 + if end-self.pointer > max_length/2-1: + tail = ' ... 
' + end -= 5 + break + snippet = self.buffer[start:end] + return ' '*indent + head + snippet + tail + '\n' \ + + ' '*(indent+self.pointer-start+len(head)) + '^' + + def __str__(self): + snippet = self.get_snippet() + where = " in \"%s\", line %d, column %d" \ + % (self.name, self.line+1, self.column+1) + if snippet is not None: + where += ":\n"+snippet + return where + +class YAMLError(Exception): + pass + +class MarkedYAMLError(YAMLError): + + def __init__(self, context=None, context_mark=None, + problem=None, problem_mark=None, note=None): + self.context = context + self.context_mark = context_mark + self.problem = problem + self.problem_mark = problem_mark + self.note = note + + def __str__(self): + lines = [] + if self.context is not None: + lines.append(self.context) + if self.context_mark is not None \ + and (self.problem is None or self.problem_mark is None + or self.context_mark.name != self.problem_mark.name + or self.context_mark.line != self.problem_mark.line + or self.context_mark.column != self.problem_mark.column): + lines.append(str(self.context_mark)) + if self.problem is not None: + lines.append(self.problem) + if self.problem_mark is not None: + lines.append(str(self.problem_mark)) + if self.note is not None: + lines.append(self.note) + return '\n'.join(lines) + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/events.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/events.py new file mode 100644 index 0000000..f79ad38 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/events.py @@ -0,0 +1,86 @@ + +# Abstract classes. + +class Event(object): + def __init__(self, start_mark=None, end_mark=None): + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] + if hasattr(self, key)] + arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) + for key in attributes]) + return '%s(%s)' % (self.__class__.__name__, arguments) + +class NodeEvent(Event): + def __init__(self, anchor, start_mark=None, end_mark=None): + self.anchor = anchor + self.start_mark = start_mark + self.end_mark = end_mark + +class CollectionStartEvent(NodeEvent): + def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, + flow_style=None): + self.anchor = anchor + self.tag = tag + self.implicit = implicit + self.start_mark = start_mark + self.end_mark = end_mark + self.flow_style = flow_style + +class CollectionEndEvent(Event): + pass + +# Implementations. 
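
These event classes are the parser's output and the emitter's input, so a stream can be round-tripped at the event level. A small sketch using the public yaml.parse()/yaml.emit() helpers with illustrative input:

import yaml

source = "a: 1\nb: [2, 3]\n"

# yaml.parse() yields the event stream described by the grammar at the top of
# emitter.py: StreamStart, DocumentStart, MappingStart, Scalar, ... StreamEnd.
events = list(yaml.parse(source))
for event in events:
    print(type(event).__name__)

# Feeding the same events back into yaml.emit() reproduces equivalent YAML.
print(yaml.emit(events))
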
+ +class StreamStartEvent(Event): + def __init__(self, start_mark=None, end_mark=None, encoding=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.encoding = encoding + +class StreamEndEvent(Event): + pass + +class DocumentStartEvent(Event): + def __init__(self, start_mark=None, end_mark=None, + explicit=None, version=None, tags=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.explicit = explicit + self.version = version + self.tags = tags + +class DocumentEndEvent(Event): + def __init__(self, start_mark=None, end_mark=None, + explicit=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.explicit = explicit + +class AliasEvent(NodeEvent): + pass + +class ScalarEvent(NodeEvent): + def __init__(self, anchor, tag, implicit, value, + start_mark=None, end_mark=None, style=None): + self.anchor = anchor + self.tag = tag + self.implicit = implicit + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + +class SequenceStartEvent(CollectionStartEvent): + pass + +class SequenceEndEvent(CollectionEndEvent): + pass + +class MappingStartEvent(CollectionStartEvent): + pass + +class MappingEndEvent(CollectionEndEvent): + pass + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/loader.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/loader.py new file mode 100644 index 0000000..e90c112 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/loader.py @@ -0,0 +1,63 @@ + +__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader'] + +from .reader import * +from .scanner import * +from .parser import * +from .composer import * +from .constructor import * +from .resolver import * + +class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + BaseConstructor.__init__(self) + BaseResolver.__init__(self) + +class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + FullConstructor.__init__(self) + Resolver.__init__(self) + +class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + SafeConstructor.__init__(self) + Resolver.__init__(self) + +class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + Constructor.__init__(self) + Resolver.__init__(self) + +# UnsafeLoader is the same as Loader (which is and was always unsafe on +# untrusted input). Use of either Loader or UnsafeLoader should be rare, since +# FullLoad should be able to load almost all YAML safely. Loader is left intact +# to ensure backwards compatibility. 
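
The comment above summarises the loader hierarchy; in application code the choice is usually made through yaml.load's Loader argument or the shortcut functions. A hedged sketch using the public yaml API with illustrative input:

import yaml

text = "retries: 3\nhosts: [alpha, beta]\n"

# SafeLoader: core YAML types only -- the right default for untrusted input.
config = yaml.safe_load(text)

# FullLoader: resolves more of the python/* tags than SafeLoader, while being
# designed to avoid arbitrary code execution.
config = yaml.load(text, Loader=yaml.FullLoader)

# Loader/UnsafeLoader can instantiate arbitrary Python objects, so they are
# only appropriate for fully trusted input.
# config = yaml.load(text, Loader=yaml.UnsafeLoader)
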
+class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + Constructor.__init__(self) + Resolver.__init__(self) diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/nodes.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/nodes.py new file mode 100644 index 0000000..c4f070c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/nodes.py @@ -0,0 +1,49 @@ + +class Node(object): + def __init__(self, tag, value, start_mark, end_mark): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + value = self.value + #if isinstance(value, list): + # if len(value) == 0: + # value = '' + # elif len(value) == 1: + # value = '<1 item>' + # else: + # value = '<%d items>' % len(value) + #else: + # if len(value) > 75: + # value = repr(value[:70]+u' ... ') + # else: + # value = repr(value) + value = repr(value) + return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) + +class ScalarNode(Node): + id = 'scalar' + def __init__(self, tag, value, + start_mark=None, end_mark=None, style=None): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + +class CollectionNode(Node): + def __init__(self, tag, value, + start_mark=None, end_mark=None, flow_style=None): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.flow_style = flow_style + +class SequenceNode(CollectionNode): + id = 'sequence' + +class MappingNode(CollectionNode): + id = 'mapping' + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/parser.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/parser.py new file mode 100644 index 0000000..13a5995 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/parser.py @@ -0,0 +1,589 @@ + +# The following YAML grammar is LL(1) and is parsed by a recursive descent +# parser. +# +# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END +# implicit_document ::= block_node DOCUMENT-END* +# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +# block_node_or_indentless_sequence ::= +# ALIAS +# | properties (block_content | indentless_block_sequence)? +# | block_content +# | indentless_block_sequence +# block_node ::= ALIAS +# | properties block_content? +# | block_content +# flow_node ::= ALIAS +# | properties flow_content? +# | flow_content +# properties ::= TAG ANCHOR? | ANCHOR TAG? +# block_content ::= block_collection | flow_collection | SCALAR +# flow_content ::= flow_collection | SCALAR +# block_collection ::= block_sequence | block_mapping +# flow_collection ::= flow_sequence | flow_mapping +# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +# indentless_sequence ::= (BLOCK-ENTRY block_node?)+ +# block_mapping ::= BLOCK-MAPPING_START +# ((KEY block_node_or_indentless_sequence?)? +# (VALUE block_node_or_indentless_sequence?)?)* +# BLOCK-END +# flow_sequence ::= FLOW-SEQUENCE-START +# (flow_sequence_entry FLOW-ENTRY)* +# flow_sequence_entry? +# FLOW-SEQUENCE-END +# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +# flow_mapping ::= FLOW-MAPPING-START +# (flow_mapping_entry FLOW-ENTRY)* +# flow_mapping_entry? +# FLOW-MAPPING-END +# flow_mapping_entry ::= flow_node | KEY flow_node? 
(VALUE flow_node?)? +# +# FIRST sets: +# +# stream: { STREAM-START } +# explicit_document: { DIRECTIVE DOCUMENT-START } +# implicit_document: FIRST(block_node) +# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } +# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } +# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } +# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } +# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } +# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } +# block_sequence: { BLOCK-SEQUENCE-START } +# block_mapping: { BLOCK-MAPPING-START } +# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } +# indentless_sequence: { ENTRY } +# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } +# flow_sequence: { FLOW-SEQUENCE-START } +# flow_mapping: { FLOW-MAPPING-START } +# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } +# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } + +__all__ = ['Parser', 'ParserError'] + +from .error import MarkedYAMLError +from .tokens import * +from .events import * +from .scanner import * + +class ParserError(MarkedYAMLError): + pass + +class Parser: + # Since writing a recursive-descendant parser is a straightforward task, we + # do not give many comments here. + + DEFAULT_TAGS = { + '!': '!', + '!!': 'tag:yaml.org,2002:', + } + + def __init__(self): + self.current_event = None + self.yaml_version = None + self.tag_handles = {} + self.states = [] + self.marks = [] + self.state = self.parse_stream_start + + def dispose(self): + # Reset the state attributes (to clear self-references) + self.states = [] + self.state = None + + def check_event(self, *choices): + # Check the type of the next event. + if self.current_event is None: + if self.state: + self.current_event = self.state() + if self.current_event is not None: + if not choices: + return True + for choice in choices: + if isinstance(self.current_event, choice): + return True + return False + + def peek_event(self): + # Get the next event. + if self.current_event is None: + if self.state: + self.current_event = self.state() + return self.current_event + + def get_event(self): + # Get the next event and proceed further. + if self.current_event is None: + if self.state: + self.current_event = self.state() + value = self.current_event + self.current_event = None + return value + + # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END + # implicit_document ::= block_node DOCUMENT-END* + # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* + + def parse_stream_start(self): + + # Parse the stream start. + token = self.get_token() + event = StreamStartEvent(token.start_mark, token.end_mark, + encoding=token.encoding) + + # Prepare the next state. + self.state = self.parse_implicit_document_start + + return event + + def parse_implicit_document_start(self): + + # Parse an implicit document. + if not self.check_token(DirectiveToken, DocumentStartToken, + StreamEndToken): + self.tag_handles = self.DEFAULT_TAGS + token = self.peek_token() + start_mark = end_mark = token.start_mark + event = DocumentStartEvent(start_mark, end_mark, + explicit=False) + + # Prepare the next state. 
+ self.states.append(self.parse_document_end) + self.state = self.parse_block_node + + return event + + else: + return self.parse_document_start() + + def parse_document_start(self): + + # Parse any extra document end indicators. + while self.check_token(DocumentEndToken): + self.get_token() + + # Parse an explicit document. + if not self.check_token(StreamEndToken): + token = self.peek_token() + start_mark = token.start_mark + version, tags = self.process_directives() + if not self.check_token(DocumentStartToken): + raise ParserError(None, None, + "expected '', but found %r" + % self.peek_token().id, + self.peek_token().start_mark) + token = self.get_token() + end_mark = token.end_mark + event = DocumentStartEvent(start_mark, end_mark, + explicit=True, version=version, tags=tags) + self.states.append(self.parse_document_end) + self.state = self.parse_document_content + else: + # Parse the end of the stream. + token = self.get_token() + event = StreamEndEvent(token.start_mark, token.end_mark) + assert not self.states + assert not self.marks + self.state = None + return event + + def parse_document_end(self): + + # Parse the document end. + token = self.peek_token() + start_mark = end_mark = token.start_mark + explicit = False + if self.check_token(DocumentEndToken): + token = self.get_token() + end_mark = token.end_mark + explicit = True + event = DocumentEndEvent(start_mark, end_mark, + explicit=explicit) + + # Prepare the next state. + self.state = self.parse_document_start + + return event + + def parse_document_content(self): + if self.check_token(DirectiveToken, + DocumentStartToken, DocumentEndToken, StreamEndToken): + event = self.process_empty_scalar(self.peek_token().start_mark) + self.state = self.states.pop() + return event + else: + return self.parse_block_node() + + def process_directives(self): + self.yaml_version = None + self.tag_handles = {} + while self.check_token(DirectiveToken): + token = self.get_token() + if token.name == 'YAML': + if self.yaml_version is not None: + raise ParserError(None, None, + "found duplicate YAML directive", token.start_mark) + major, minor = token.value + if major != 1: + raise ParserError(None, None, + "found incompatible YAML document (version 1.* is required)", + token.start_mark) + self.yaml_version = token.value + elif token.name == 'TAG': + handle, prefix = token.value + if handle in self.tag_handles: + raise ParserError(None, None, + "duplicate tag handle %r" % handle, + token.start_mark) + self.tag_handles[handle] = prefix + if self.tag_handles: + value = self.yaml_version, self.tag_handles.copy() + else: + value = self.yaml_version, None + for key in self.DEFAULT_TAGS: + if key not in self.tag_handles: + self.tag_handles[key] = self.DEFAULT_TAGS[key] + return value + + # block_node_or_indentless_sequence ::= ALIAS + # | properties (block_content | indentless_block_sequence)? + # | block_content + # | indentless_block_sequence + # block_node ::= ALIAS + # | properties block_content? + # | block_content + # flow_node ::= ALIAS + # | properties flow_content? + # | flow_content + # properties ::= TAG ANCHOR? | ANCHOR TAG? 
+ # block_content ::= block_collection | flow_collection | SCALAR + # flow_content ::= flow_collection | SCALAR + # block_collection ::= block_sequence | block_mapping + # flow_collection ::= flow_sequence | flow_mapping + + def parse_block_node(self): + return self.parse_node(block=True) + + def parse_flow_node(self): + return self.parse_node() + + def parse_block_node_or_indentless_sequence(self): + return self.parse_node(block=True, indentless_sequence=True) + + def parse_node(self, block=False, indentless_sequence=False): + if self.check_token(AliasToken): + token = self.get_token() + event = AliasEvent(token.value, token.start_mark, token.end_mark) + self.state = self.states.pop() + else: + anchor = None + tag = None + start_mark = end_mark = tag_mark = None + if self.check_token(AnchorToken): + token = self.get_token() + start_mark = token.start_mark + end_mark = token.end_mark + anchor = token.value + if self.check_token(TagToken): + token = self.get_token() + tag_mark = token.start_mark + end_mark = token.end_mark + tag = token.value + elif self.check_token(TagToken): + token = self.get_token() + start_mark = tag_mark = token.start_mark + end_mark = token.end_mark + tag = token.value + if self.check_token(AnchorToken): + token = self.get_token() + end_mark = token.end_mark + anchor = token.value + if tag is not None: + handle, suffix = tag + if handle is not None: + if handle not in self.tag_handles: + raise ParserError("while parsing a node", start_mark, + "found undefined tag handle %r" % handle, + tag_mark) + tag = self.tag_handles[handle]+suffix + else: + tag = suffix + #if tag == '!': + # raise ParserError("while parsing a node", start_mark, + # "found non-specific tag '!'", tag_mark, + # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") + if start_mark is None: + start_mark = end_mark = self.peek_token().start_mark + event = None + implicit = (tag is None or tag == '!') + if indentless_sequence and self.check_token(BlockEntryToken): + end_mark = self.peek_token().end_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark) + self.state = self.parse_indentless_sequence_entry + else: + if self.check_token(ScalarToken): + token = self.get_token() + end_mark = token.end_mark + if (token.plain and tag is None) or tag == '!': + implicit = (True, False) + elif tag is None: + implicit = (False, True) + else: + implicit = (False, False) + event = ScalarEvent(anchor, tag, implicit, token.value, + start_mark, end_mark, style=token.style) + self.state = self.states.pop() + elif self.check_token(FlowSequenceStartToken): + end_mark = self.peek_token().end_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=True) + self.state = self.parse_flow_sequence_first_entry + elif self.check_token(FlowMappingStartToken): + end_mark = self.peek_token().end_mark + event = MappingStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=True) + self.state = self.parse_flow_mapping_first_key + elif block and self.check_token(BlockSequenceStartToken): + end_mark = self.peek_token().start_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=False) + self.state = self.parse_block_sequence_first_entry + elif block and self.check_token(BlockMappingStartToken): + end_mark = self.peek_token().start_mark + event = MappingStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=False) + self.state = self.parse_block_mapping_first_key + elif anchor is not 
None or tag is not None:
+                # Empty scalars are allowed even if a tag or an anchor is
+                # specified.
+                event = ScalarEvent(anchor, tag, (implicit, False), '',
+                        start_mark, end_mark)
+                self.state = self.states.pop()
+            else:
+                if block:
+                    node = 'block'
+                else:
+                    node = 'flow'
+                token = self.peek_token()
+                raise ParserError("while parsing a %s node" % node, start_mark,
+                        "expected the node content, but found %r" % token.id,
+                        token.start_mark)
+        return event
+
+    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+
+    def parse_block_sequence_first_entry(self):
+        token = self.get_token()
+        self.marks.append(token.start_mark)
+        return self.parse_block_sequence_entry()
+
+    def parse_block_sequence_entry(self):
+        if self.check_token(BlockEntryToken):
+            token = self.get_token()
+            if not self.check_token(BlockEntryToken, BlockEndToken):
+                self.states.append(self.parse_block_sequence_entry)
+                return self.parse_block_node()
+            else:
+                self.state = self.parse_block_sequence_entry
+                return self.process_empty_scalar(token.end_mark)
+        if not self.check_token(BlockEndToken):
+            token = self.peek_token()
+            raise ParserError("while parsing a block collection", self.marks[-1],
+                    "expected <block end>, but found %r" % token.id, token.start_mark)
+        token = self.get_token()
+        event = SequenceEndEvent(token.start_mark, token.end_mark)
+        self.state = self.states.pop()
+        self.marks.pop()
+        return event
+
+    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+
+    def parse_indentless_sequence_entry(self):
+        if self.check_token(BlockEntryToken):
+            token = self.get_token()
+            if not self.check_token(BlockEntryToken,
+                    KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_indentless_sequence_entry)
+                return self.parse_block_node()
+            else:
+                self.state = self.parse_indentless_sequence_entry
+                return self.process_empty_scalar(token.end_mark)
+        token = self.peek_token()
+        event = SequenceEndEvent(token.start_mark, token.start_mark)
+        self.state = self.states.pop()
+        return event
+
+    # block_mapping     ::= BLOCK-MAPPING_START
+    #                       ((KEY block_node_or_indentless_sequence?)?
+    #                       (VALUE block_node_or_indentless_sequence?)?)*
+    #                       BLOCK-END
+
+    def parse_block_mapping_first_key(self):
+        token = self.get_token()
+        self.marks.append(token.start_mark)
+        return self.parse_block_mapping_key()
+
+    def parse_block_mapping_key(self):
+        if self.check_token(KeyToken):
+            token = self.get_token()
+            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_block_mapping_value)
+                return self.parse_block_node_or_indentless_sequence()
+            else:
+                self.state = self.parse_block_mapping_value
+                return self.process_empty_scalar(token.end_mark)
+        if not self.check_token(BlockEndToken):
+            token = self.peek_token()
+            raise ParserError("while parsing a block mapping", self.marks[-1],
+                    "expected <block end>, but found %r" % token.id, token.start_mark)
+        token = self.get_token()
+        event = MappingEndEvent(token.start_mark, token.end_mark)
+        self.state = self.states.pop()
+        self.marks.pop()
+        return event
+
+    def parse_block_mapping_value(self):
+        if self.check_token(ValueToken):
+            token = self.get_token()
+            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
+                self.states.append(self.parse_block_mapping_key)
+                return self.parse_block_node_or_indentless_sequence()
+            else:
+                self.state = self.parse_block_mapping_key
+                return self.process_empty_scalar(token.end_mark)
+        else:
+            self.state = self.parse_block_mapping_key
+            token = self.peek_token()
+            return self.process_empty_scalar(token.start_mark)
+
+    # flow_sequence     ::= FLOW-SEQUENCE-START
+    #                       (flow_sequence_entry FLOW-ENTRY)*
+    #                       flow_sequence_entry?
+    #                       FLOW-SEQUENCE-END
+    # flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+    #
+    # Note that while production rules for both flow_sequence_entry and
+    # flow_mapping_entry are equal, their interpretations are different.
+    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+    # generate an inline mapping (set syntax).
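The grammar note above says that a `KEY flow_node? (VALUE flow_node?)?` entry inside a flow sequence is read as an inline single-pair mapping, which the `parse_flow_sequence_entry*` states below implement. A minimal sketch of that behaviour, using the upstream `yaml` package on the assumption that this vendored copy behaves the same:

    import yaml

    # A 'key: value' entry inside a flow sequence becomes a one-pair mapping;
    # parse_flow_sequence_entry emits an implicit MappingStartEvent for it.
    print(yaml.safe_load('[ a: 1, b ]'))    # -> [{'a': 1}, 'b']

    # Writing the mapping explicitly gives the same result.
    print(yaml.safe_load('[ {a: 1}, b ]'))  # -> [{'a': 1}, 'b']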
+ + def parse_flow_sequence_first_entry(self): + token = self.get_token() + self.marks.append(token.start_mark) + return self.parse_flow_sequence_entry(first=True) + + def parse_flow_sequence_entry(self, first=False): + if not self.check_token(FlowSequenceEndToken): + if not first: + if self.check_token(FlowEntryToken): + self.get_token() + else: + token = self.peek_token() + raise ParserError("while parsing a flow sequence", self.marks[-1], + "expected ',' or ']', but got %r" % token.id, token.start_mark) + + if self.check_token(KeyToken): + token = self.peek_token() + event = MappingStartEvent(None, None, True, + token.start_mark, token.end_mark, + flow_style=True) + self.state = self.parse_flow_sequence_entry_mapping_key + return event + elif not self.check_token(FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry) + return self.parse_flow_node() + token = self.get_token() + event = SequenceEndEvent(token.start_mark, token.end_mark) + self.state = self.states.pop() + self.marks.pop() + return event + + def parse_flow_sequence_entry_mapping_key(self): + token = self.get_token() + if not self.check_token(ValueToken, + FlowEntryToken, FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry_mapping_value) + return self.parse_flow_node() + else: + self.state = self.parse_flow_sequence_entry_mapping_value + return self.process_empty_scalar(token.end_mark) + + def parse_flow_sequence_entry_mapping_value(self): + if self.check_token(ValueToken): + token = self.get_token() + if not self.check_token(FlowEntryToken, FlowSequenceEndToken): + self.states.append(self.parse_flow_sequence_entry_mapping_end) + return self.parse_flow_node() + else: + self.state = self.parse_flow_sequence_entry_mapping_end + return self.process_empty_scalar(token.end_mark) + else: + self.state = self.parse_flow_sequence_entry_mapping_end + token = self.peek_token() + return self.process_empty_scalar(token.start_mark) + + def parse_flow_sequence_entry_mapping_end(self): + self.state = self.parse_flow_sequence_entry + token = self.peek_token() + return MappingEndEvent(token.start_mark, token.start_mark) + + # flow_mapping ::= FLOW-MAPPING-START + # (flow_mapping_entry FLOW-ENTRY)* + # flow_mapping_entry? + # FLOW-MAPPING-END + # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
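As with flow sequences, the flow-mapping productions above allow an entry to omit its value; `parse_flow_mapping_key` and `parse_flow_mapping_empty_value` below substitute an empty scalar for it. A small sketch, again via the upstream `yaml` API under the assumption that the vendored module matches it:

    import yaml

    # A key with no ':' value gets an empty scalar, which resolves to null.
    print(yaml.safe_load('{a, b: 1}'))    # -> {'a': None, 'b': 1}

    # A ':' with nothing after it is handled the same way.
    print(yaml.safe_load('{a: , b: 1}'))  # -> {'a': None, 'b': 1}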
+ + def parse_flow_mapping_first_key(self): + token = self.get_token() + self.marks.append(token.start_mark) + return self.parse_flow_mapping_key(first=True) + + def parse_flow_mapping_key(self, first=False): + if not self.check_token(FlowMappingEndToken): + if not first: + if self.check_token(FlowEntryToken): + self.get_token() + else: + token = self.peek_token() + raise ParserError("while parsing a flow mapping", self.marks[-1], + "expected ',' or '}', but got %r" % token.id, token.start_mark) + if self.check_token(KeyToken): + token = self.get_token() + if not self.check_token(ValueToken, + FlowEntryToken, FlowMappingEndToken): + self.states.append(self.parse_flow_mapping_value) + return self.parse_flow_node() + else: + self.state = self.parse_flow_mapping_value + return self.process_empty_scalar(token.end_mark) + elif not self.check_token(FlowMappingEndToken): + self.states.append(self.parse_flow_mapping_empty_value) + return self.parse_flow_node() + token = self.get_token() + event = MappingEndEvent(token.start_mark, token.end_mark) + self.state = self.states.pop() + self.marks.pop() + return event + + def parse_flow_mapping_value(self): + if self.check_token(ValueToken): + token = self.get_token() + if not self.check_token(FlowEntryToken, FlowMappingEndToken): + self.states.append(self.parse_flow_mapping_key) + return self.parse_flow_node() + else: + self.state = self.parse_flow_mapping_key + return self.process_empty_scalar(token.end_mark) + else: + self.state = self.parse_flow_mapping_key + token = self.peek_token() + return self.process_empty_scalar(token.start_mark) + + def parse_flow_mapping_empty_value(self): + self.state = self.parse_flow_mapping_key + return self.process_empty_scalar(self.peek_token().start_mark) + + def process_empty_scalar(self, mark): + return ScalarEvent(None, None, (True, False), '', mark, mark) + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/reader.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/reader.py new file mode 100644 index 0000000..774b021 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/reader.py @@ -0,0 +1,185 @@ +# This module contains abstractions for the input stream. You don't have to +# looks further, there are no pretty code. +# +# We define two classes here. +# +# Mark(source, line, column) +# It's just a record and its only use is producing nice error messages. +# Parser does not use it for any other purposes. +# +# Reader(source, data) +# Reader determines the encoding of `data` and converts it to unicode. +# Reader provides the following methods and attributes: +# reader.peek(length=1) - return the next `length` characters +# reader.forward(length=1) - move the current position to `length` characters. +# reader.index - the number of the current character. +# reader.line, stream.column - the line and the column of the current character. 
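That closes the vendored parser: its job is to turn the token stream into events, while the Reader described in the header comment above supplies the decoded characters and the Mark objects used in error messages. A rough sketch of the resulting event stream, using the public upstream-style `yaml.parse` entry point rather than the vendored import path (an assumption, since only the vendored files appear in this diff):

    import yaml

    # Every event carries start_mark/end_mark values built by Reader.get_mark().
    for event in yaml.parse('a: 1\n'):
        print(type(event).__name__, getattr(event, 'value', ''))

    # Expected order of event types:
    #   StreamStartEvent, DocumentStartEvent, MappingStartEvent,
    #   ScalarEvent ('a'), ScalarEvent ('1'),
    #   MappingEndEvent, DocumentEndEvent, StreamEndEvent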
+
+__all__ = ['Reader', 'ReaderError']
+
+from .error import YAMLError, Mark
+
+import codecs, re
+
+class ReaderError(YAMLError):
+
+    def __init__(self, name, position, character, encoding, reason):
+        self.name = name
+        self.character = character
+        self.position = position
+        self.encoding = encoding
+        self.reason = reason
+
+    def __str__(self):
+        if isinstance(self.character, bytes):
+            return "'%s' codec can't decode byte #x%02x: %s\n" \
+                    "  in \"%s\", position %d" \
+                    % (self.encoding, ord(self.character), self.reason,
+                            self.name, self.position)
+        else:
+            return "unacceptable character #x%04x: %s\n" \
+                    "  in \"%s\", position %d" \
+                    % (self.character, self.reason,
+                            self.name, self.position)
+
+class Reader(object):
+    # Reader:
+    # - determines the data encoding and converts it to a unicode string,
+    # - checks if characters are in allowed range,
+    # - adds '\0' to the end.
+
+    # Reader accepts
+    #  - a `bytes` object,
+    #  - a `str` object,
+    #  - a file-like object with its `read` method returning `str`,
+    #  - a file-like object with its `read` method returning `unicode`.
+
+    # Yeah, it's ugly and slow.
+
+    def __init__(self, stream):
+        self.name = None
+        self.stream = None
+        self.stream_pointer = 0
+        self.eof = True
+        self.buffer = ''
+        self.pointer = 0
+        self.raw_buffer = None
+        self.raw_decode = None
+        self.encoding = None
+        self.index = 0
+        self.line = 0
+        self.column = 0
+        if isinstance(stream, str):
+            self.name = "<unicode string>"
+            self.check_printable(stream)
+            self.buffer = stream+'\0'
+        elif isinstance(stream, bytes):
+            self.name = "<byte string>"
+            self.raw_buffer = stream
+            self.determine_encoding()
+        else:
+            self.stream = stream
+            self.name = getattr(stream, 'name', "<file>")
+            self.eof = False
+            self.raw_buffer = None
+            self.determine_encoding()
+
+    def peek(self, index=0):
+        try:
+            return self.buffer[self.pointer+index]
+        except IndexError:
+            self.update(index+1)
+            return self.buffer[self.pointer+index]
+
+    def prefix(self, length=1):
+        if self.pointer+length >= len(self.buffer):
+            self.update(length)
+        return self.buffer[self.pointer:self.pointer+length]
+
+    def forward(self, length=1):
+        if self.pointer+length+1 >= len(self.buffer):
+            self.update(length+1)
+        while length:
+            ch = self.buffer[self.pointer]
+            self.pointer += 1
+            self.index += 1
+            if ch in '\n\x85\u2028\u2029' \
+                    or (ch == '\r' and self.buffer[self.pointer] != '\n'):
+                self.line += 1
+                self.column = 0
+            elif ch != '\uFEFF':
+                self.column += 1
+            length -= 1
+
+    def get_mark(self):
+        if self.stream is None:
+            return Mark(self.name, self.index, self.line, self.column,
+                    self.buffer, self.pointer)
+        else:
+            return Mark(self.name, self.index, self.line, self.column,
+                    None, None)
+
+    def determine_encoding(self):
+        while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
+            self.update_raw()
+        if isinstance(self.raw_buffer, bytes):
+            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
+                self.raw_decode = codecs.utf_16_le_decode
+                self.encoding = 'utf-16-le'
+            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
+                self.raw_decode = codecs.utf_16_be_decode
+                self.encoding = 'utf-16-be'
+            else:
+                self.raw_decode = codecs.utf_8_decode
+                self.encoding = 'utf-8'
+        self.update(1)
+
+    NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
+    def check_printable(self, data):
+        match = self.NON_PRINTABLE.search(data)
+        if match:
+            character = match.group()
+            position = self.index+(len(self.buffer)-self.pointer)+match.start()
+            raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed") + + def update(self, length): + if self.raw_buffer is None: + return + self.buffer = self.buffer[self.pointer:] + self.pointer = 0 + while len(self.buffer) < length: + if not self.eof: + self.update_raw() + if self.raw_decode is not None: + try: + data, converted = self.raw_decode(self.raw_buffer, + 'strict', self.eof) + except UnicodeDecodeError as exc: + character = self.raw_buffer[exc.start] + if self.stream is not None: + position = self.stream_pointer-len(self.raw_buffer)+exc.start + else: + position = exc.start + raise ReaderError(self.name, position, character, + exc.encoding, exc.reason) + else: + data = self.raw_buffer + converted = len(data) + self.check_printable(data) + self.buffer += data + self.raw_buffer = self.raw_buffer[converted:] + if self.eof: + self.buffer += '\0' + self.raw_buffer = None + break + + def update_raw(self, size=4096): + data = self.stream.read(size) + if self.raw_buffer is None: + self.raw_buffer = data + else: + self.raw_buffer += data + self.stream_pointer += len(data) + if not data: + self.eof = True diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/representer.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/representer.py new file mode 100644 index 0000000..3b0b192 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/representer.py @@ -0,0 +1,389 @@ + +__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', + 'RepresenterError'] + +from .error import * +from .nodes import * + +import datetime, copyreg, types, base64, collections + +class RepresenterError(YAMLError): + pass + +class BaseRepresenter: + + yaml_representers = {} + yaml_multi_representers = {} + + def __init__(self, default_style=None, default_flow_style=False, sort_keys=True): + self.default_style = default_style + self.sort_keys = sort_keys + self.default_flow_style = default_flow_style + self.represented_objects = {} + self.object_keeper = [] + self.alias_key = None + + def represent(self, data): + node = self.represent_data(data) + self.serialize(node) + self.represented_objects = {} + self.object_keeper = [] + self.alias_key = None + + def represent_data(self, data): + if self.ignore_aliases(data): + self.alias_key = None + else: + self.alias_key = id(data) + if self.alias_key is not None: + if self.alias_key in self.represented_objects: + node = self.represented_objects[self.alias_key] + #if node is None: + # raise RepresenterError("recursive objects are not allowed: %r" % data) + return node + #self.represented_objects[alias_key] = None + self.object_keeper.append(data) + data_types = type(data).__mro__ + if data_types[0] in self.yaml_representers: + node = self.yaml_representers[data_types[0]](self, data) + else: + for data_type in data_types: + if data_type in self.yaml_multi_representers: + node = self.yaml_multi_representers[data_type](self, data) + break + else: + if None in self.yaml_multi_representers: + node = self.yaml_multi_representers[None](self, data) + elif None in self.yaml_representers: + node = self.yaml_representers[None](self, data) + else: + node = ScalarNode(None, str(data)) + #if alias_key is not None: + # self.represented_objects[alias_key] = node + return node + + @classmethod + def add_representer(cls, data_type, representer): + if not 'yaml_representers' in cls.__dict__: + cls.yaml_representers = cls.yaml_representers.copy() + cls.yaml_representers[data_type] = representer + + @classmethod + def add_multi_representer(cls, data_type, representer): + if not 
'yaml_multi_representers' in cls.__dict__: + cls.yaml_multi_representers = cls.yaml_multi_representers.copy() + cls.yaml_multi_representers[data_type] = representer + + def represent_scalar(self, tag, value, style=None): + if style is None: + style = self.default_style + node = ScalarNode(tag, value, style=style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + return node + + def represent_sequence(self, tag, sequence, flow_style=None): + value = [] + node = SequenceNode(tag, value, flow_style=flow_style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + best_style = True + for item in sequence: + node_item = self.represent_data(item) + if not (isinstance(node_item, ScalarNode) and not node_item.style): + best_style = False + value.append(node_item) + if flow_style is None: + if self.default_flow_style is not None: + node.flow_style = self.default_flow_style + else: + node.flow_style = best_style + return node + + def represent_mapping(self, tag, mapping, flow_style=None): + value = [] + node = MappingNode(tag, value, flow_style=flow_style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + best_style = True + if hasattr(mapping, 'items'): + mapping = list(mapping.items()) + if self.sort_keys: + try: + mapping = sorted(mapping) + except TypeError: + pass + for item_key, item_value in mapping: + node_key = self.represent_data(item_key) + node_value = self.represent_data(item_value) + if not (isinstance(node_key, ScalarNode) and not node_key.style): + best_style = False + if not (isinstance(node_value, ScalarNode) and not node_value.style): + best_style = False + value.append((node_key, node_value)) + if flow_style is None: + if self.default_flow_style is not None: + node.flow_style = self.default_flow_style + else: + node.flow_style = best_style + return node + + def ignore_aliases(self, data): + return False + +class SafeRepresenter(BaseRepresenter): + + def ignore_aliases(self, data): + if data is None: + return True + if isinstance(data, tuple) and data == (): + return True + if isinstance(data, (str, bytes, bool, int, float)): + return True + + def represent_none(self, data): + return self.represent_scalar('tag:yaml.org,2002:null', 'null') + + def represent_str(self, data): + return self.represent_scalar('tag:yaml.org,2002:str', data) + + def represent_binary(self, data): + if hasattr(base64, 'encodebytes'): + data = base64.encodebytes(data).decode('ascii') + else: + data = base64.encodestring(data).decode('ascii') + return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|') + + def represent_bool(self, data): + if data: + value = 'true' + else: + value = 'false' + return self.represent_scalar('tag:yaml.org,2002:bool', value) + + def represent_int(self, data): + return self.represent_scalar('tag:yaml.org,2002:int', str(data)) + + inf_value = 1e300 + while repr(inf_value) != repr(inf_value*inf_value): + inf_value *= inf_value + + def represent_float(self, data): + if data != data or (data == 0.0 and data == 1.0): + value = '.nan' + elif data == self.inf_value: + value = '.inf' + elif data == -self.inf_value: + value = '-.inf' + else: + value = repr(data).lower() + # Note that in some cases `repr(data)` represents a float number + # without the decimal parts. For instance: + # >>> repr(1e17) + # '1e17' + # Unfortunately, this is not a valid float representation according + # to the definition of the `!!float` tag. We fix this by adding + # '.0' before the 'e' symbol. 
+ if '.' not in value and 'e' in value: + value = value.replace('e', '.0e', 1) + return self.represent_scalar('tag:yaml.org,2002:float', value) + + def represent_list(self, data): + #pairs = (len(data) > 0 and isinstance(data, list)) + #if pairs: + # for item in data: + # if not isinstance(item, tuple) or len(item) != 2: + # pairs = False + # break + #if not pairs: + return self.represent_sequence('tag:yaml.org,2002:seq', data) + #value = [] + #for item_key, item_value in data: + # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', + # [(item_key, item_value)])) + #return SequenceNode(u'tag:yaml.org,2002:pairs', value) + + def represent_dict(self, data): + return self.represent_mapping('tag:yaml.org,2002:map', data) + + def represent_set(self, data): + value = {} + for key in data: + value[key] = None + return self.represent_mapping('tag:yaml.org,2002:set', value) + + def represent_date(self, data): + value = data.isoformat() + return self.represent_scalar('tag:yaml.org,2002:timestamp', value) + + def represent_datetime(self, data): + value = data.isoformat(' ') + return self.represent_scalar('tag:yaml.org,2002:timestamp', value) + + def represent_yaml_object(self, tag, data, cls, flow_style=None): + if hasattr(data, '__getstate__'): + state = data.__getstate__() + else: + state = data.__dict__.copy() + return self.represent_mapping(tag, state, flow_style=flow_style) + + def represent_undefined(self, data): + raise RepresenterError("cannot represent an object", data) + +SafeRepresenter.add_representer(type(None), + SafeRepresenter.represent_none) + +SafeRepresenter.add_representer(str, + SafeRepresenter.represent_str) + +SafeRepresenter.add_representer(bytes, + SafeRepresenter.represent_binary) + +SafeRepresenter.add_representer(bool, + SafeRepresenter.represent_bool) + +SafeRepresenter.add_representer(int, + SafeRepresenter.represent_int) + +SafeRepresenter.add_representer(float, + SafeRepresenter.represent_float) + +SafeRepresenter.add_representer(list, + SafeRepresenter.represent_list) + +SafeRepresenter.add_representer(tuple, + SafeRepresenter.represent_list) + +SafeRepresenter.add_representer(dict, + SafeRepresenter.represent_dict) + +SafeRepresenter.add_representer(set, + SafeRepresenter.represent_set) + +SafeRepresenter.add_representer(datetime.date, + SafeRepresenter.represent_date) + +SafeRepresenter.add_representer(datetime.datetime, + SafeRepresenter.represent_datetime) + +SafeRepresenter.add_representer(None, + SafeRepresenter.represent_undefined) + +class Representer(SafeRepresenter): + + def represent_complex(self, data): + if data.imag == 0.0: + data = '%r' % data.real + elif data.real == 0.0: + data = '%rj' % data.imag + elif data.imag > 0: + data = '%r+%rj' % (data.real, data.imag) + else: + data = '%r%rj' % (data.real, data.imag) + return self.represent_scalar('tag:yaml.org,2002:python/complex', data) + + def represent_tuple(self, data): + return self.represent_sequence('tag:yaml.org,2002:python/tuple', data) + + def represent_name(self, data): + name = '%s.%s' % (data.__module__, data.__name__) + return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '') + + def represent_module(self, data): + return self.represent_scalar( + 'tag:yaml.org,2002:python/module:'+data.__name__, '') + + def represent_object(self, data): + # We use __reduce__ API to save the data. 
data.__reduce__ returns + # a tuple of length 2-5: + # (function, args, state, listitems, dictitems) + + # For reconstructing, we calls function(*args), then set its state, + # listitems, and dictitems if they are not None. + + # A special case is when function.__name__ == '__newobj__'. In this + # case we create the object with args[0].__new__(*args). + + # Another special case is when __reduce__ returns a string - we don't + # support it. + + # We produce a !!python/object, !!python/object/new or + # !!python/object/apply node. + + cls = type(data) + if cls in copyreg.dispatch_table: + reduce = copyreg.dispatch_table[cls](data) + elif hasattr(data, '__reduce_ex__'): + reduce = data.__reduce_ex__(2) + elif hasattr(data, '__reduce__'): + reduce = data.__reduce__() + else: + raise RepresenterError("cannot represent an object", data) + reduce = (list(reduce)+[None]*5)[:5] + function, args, state, listitems, dictitems = reduce + args = list(args) + if state is None: + state = {} + if listitems is not None: + listitems = list(listitems) + if dictitems is not None: + dictitems = dict(dictitems) + if function.__name__ == '__newobj__': + function = args[0] + args = args[1:] + tag = 'tag:yaml.org,2002:python/object/new:' + newobj = True + else: + tag = 'tag:yaml.org,2002:python/object/apply:' + newobj = False + function_name = '%s.%s' % (function.__module__, function.__name__) + if not args and not listitems and not dictitems \ + and isinstance(state, dict) and newobj: + return self.represent_mapping( + 'tag:yaml.org,2002:python/object:'+function_name, state) + if not listitems and not dictitems \ + and isinstance(state, dict) and not state: + return self.represent_sequence(tag+function_name, args) + value = {} + if args: + value['args'] = args + if state or not isinstance(state, dict): + value['state'] = state + if listitems: + value['listitems'] = listitems + if dictitems: + value['dictitems'] = dictitems + return self.represent_mapping(tag+function_name, value) + + def represent_ordered_dict(self, data): + # Provide uniform representation across different Python versions. 
+ data_type = type(data) + tag = 'tag:yaml.org,2002:python/object/apply:%s.%s' \ + % (data_type.__module__, data_type.__name__) + items = [[key, value] for key, value in data.items()] + return self.represent_sequence(tag, [items]) + +Representer.add_representer(complex, + Representer.represent_complex) + +Representer.add_representer(tuple, + Representer.represent_tuple) + +Representer.add_representer(type, + Representer.represent_name) + +Representer.add_representer(collections.OrderedDict, + Representer.represent_ordered_dict) + +Representer.add_representer(types.FunctionType, + Representer.represent_name) + +Representer.add_representer(types.BuiltinFunctionType, + Representer.represent_name) + +Representer.add_representer(types.ModuleType, + Representer.represent_module) + +Representer.add_multi_representer(object, + Representer.represent_object) + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/resolver.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/resolver.py new file mode 100644 index 0000000..013896d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/resolver.py @@ -0,0 +1,227 @@ + +__all__ = ['BaseResolver', 'Resolver'] + +from .error import * +from .nodes import * + +import re + +class ResolverError(YAMLError): + pass + +class BaseResolver: + + DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str' + DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq' + DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map' + + yaml_implicit_resolvers = {} + yaml_path_resolvers = {} + + def __init__(self): + self.resolver_exact_paths = [] + self.resolver_prefix_paths = [] + + @classmethod + def add_implicit_resolver(cls, tag, regexp, first): + if not 'yaml_implicit_resolvers' in cls.__dict__: + implicit_resolvers = {} + for key in cls.yaml_implicit_resolvers: + implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:] + cls.yaml_implicit_resolvers = implicit_resolvers + if first is None: + first = [None] + for ch in first: + cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) + + @classmethod + def add_path_resolver(cls, tag, path, kind=None): + # Note: `add_path_resolver` is experimental. The API could be changed. + # `new_path` is a pattern that is matched against the path from the + # root to the node that is being considered. `node_path` elements are + # tuples `(node_check, index_check)`. `node_check` is a node class: + # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None` + # matches any kind of a node. `index_check` could be `None`, a boolean + # value, a string value, or a number. `None` and `False` match against + # any _value_ of sequence and mapping nodes. `True` matches against + # any _key_ of a mapping node. A string `index_check` matches against + # a mapping value that corresponds to a scalar key which content is + # equal to the `index_check` value. An integer `index_check` matches + # against a sequence value with the index equal to `index_check`. 
+ if not 'yaml_path_resolvers' in cls.__dict__: + cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() + new_path = [] + for element in path: + if isinstance(element, (list, tuple)): + if len(element) == 2: + node_check, index_check = element + elif len(element) == 1: + node_check = element[0] + index_check = True + else: + raise ResolverError("Invalid path element: %s" % element) + else: + node_check = None + index_check = element + if node_check is str: + node_check = ScalarNode + elif node_check is list: + node_check = SequenceNode + elif node_check is dict: + node_check = MappingNode + elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ + and not isinstance(node_check, str) \ + and node_check is not None: + raise ResolverError("Invalid node checker: %s" % node_check) + if not isinstance(index_check, (str, int)) \ + and index_check is not None: + raise ResolverError("Invalid index checker: %s" % index_check) + new_path.append((node_check, index_check)) + if kind is str: + kind = ScalarNode + elif kind is list: + kind = SequenceNode + elif kind is dict: + kind = MappingNode + elif kind not in [ScalarNode, SequenceNode, MappingNode] \ + and kind is not None: + raise ResolverError("Invalid node kind: %s" % kind) + cls.yaml_path_resolvers[tuple(new_path), kind] = tag + + def descend_resolver(self, current_node, current_index): + if not self.yaml_path_resolvers: + return + exact_paths = {} + prefix_paths = [] + if current_node: + depth = len(self.resolver_prefix_paths) + for path, kind in self.resolver_prefix_paths[-1]: + if self.check_resolver_prefix(depth, path, kind, + current_node, current_index): + if len(path) > depth: + prefix_paths.append((path, kind)) + else: + exact_paths[kind] = self.yaml_path_resolvers[path, kind] + else: + for path, kind in self.yaml_path_resolvers: + if not path: + exact_paths[kind] = self.yaml_path_resolvers[path, kind] + else: + prefix_paths.append((path, kind)) + self.resolver_exact_paths.append(exact_paths) + self.resolver_prefix_paths.append(prefix_paths) + + def ascend_resolver(self): + if not self.yaml_path_resolvers: + return + self.resolver_exact_paths.pop() + self.resolver_prefix_paths.pop() + + def check_resolver_prefix(self, depth, path, kind, + current_node, current_index): + node_check, index_check = path[depth-1] + if isinstance(node_check, str): + if current_node.tag != node_check: + return + elif node_check is not None: + if not isinstance(current_node, node_check): + return + if index_check is True and current_index is not None: + return + if (index_check is False or index_check is None) \ + and current_index is None: + return + if isinstance(index_check, str): + if not (isinstance(current_index, ScalarNode) + and index_check == current_index.value): + return + elif isinstance(index_check, int) and not isinstance(index_check, bool): + if index_check != current_index: + return + return True + + def resolve(self, kind, value, implicit): + if kind is ScalarNode and implicit[0]: + if value == '': + resolvers = self.yaml_implicit_resolvers.get('', []) + else: + resolvers = self.yaml_implicit_resolvers.get(value[0], []) + wildcard_resolvers = self.yaml_implicit_resolvers.get(None, []) + for tag, regexp in resolvers + wildcard_resolvers: + if regexp.match(value): + return tag + implicit = implicit[1] + if self.yaml_path_resolvers: + exact_paths = self.resolver_exact_paths[-1] + if kind in exact_paths: + return exact_paths[kind] + if None in exact_paths: + return exact_paths[None] + if kind is ScalarNode: + return 
self.DEFAULT_SCALAR_TAG + elif kind is SequenceNode: + return self.DEFAULT_SEQUENCE_TAG + elif kind is MappingNode: + return self.DEFAULT_MAPPING_TAG + +class Resolver(BaseResolver): + pass + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:bool', + re.compile(r'''^(?:yes|Yes|YES|no|No|NO + |true|True|TRUE|false|False|FALSE + |on|On|ON|off|Off|OFF)$''', re.X), + list('yYnNtTfFoO')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:float', + re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? + |\.[0-9_]+(?:[eE][-+][0-9]+)? + |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* + |[-+]?\.(?:inf|Inf|INF) + |\.(?:nan|NaN|NAN))$''', re.X), + list('-+0123456789.')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:int', + re.compile(r'''^(?:[-+]?0b[0-1_]+ + |[-+]?0[0-7_]+ + |[-+]?(?:0|[1-9][0-9_]*) + |[-+]?0x[0-9a-fA-F_]+ + |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), + list('-+0123456789')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:merge', + re.compile(r'^(?:<<)$'), + ['<']) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:null', + re.compile(r'''^(?: ~ + |null|Null|NULL + | )$''', re.X), + ['~', 'n', 'N', '']) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:timestamp', + re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] + |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? + (?:[Tt]|[ \t]+)[0-9][0-9]? + :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? + (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), + list('0123456789')) + +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:value', + re.compile(r'^(?:=)$'), + ['=']) + +# The following resolver is only for documentation purposes. It cannot work +# because plain scalars cannot start with '!', '&', or '*'. +Resolver.add_implicit_resolver( + 'tag:yaml.org,2002:yaml', + re.compile(r'^(?:!|&|\*)$'), + list('!&*')) + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/scanner.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/scanner.py new file mode 100644 index 0000000..7437ede --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/scanner.py @@ -0,0 +1,1435 @@ + +# Scanner produces tokens of the following types: +# STREAM-START +# STREAM-END +# DIRECTIVE(name, value) +# DOCUMENT-START +# DOCUMENT-END +# BLOCK-SEQUENCE-START +# BLOCK-MAPPING-START +# BLOCK-END +# FLOW-SEQUENCE-START +# FLOW-MAPPING-START +# FLOW-SEQUENCE-END +# FLOW-MAPPING-END +# BLOCK-ENTRY +# FLOW-ENTRY +# KEY +# VALUE +# ALIAS(value) +# ANCHOR(value) +# TAG(value) +# SCALAR(value, plain, style) +# +# Read comments in the Scanner code for more details. +# + +__all__ = ['Scanner', 'ScannerError'] + +from .error import MarkedYAMLError +from .tokens import * + +class ScannerError(MarkedYAMLError): + pass + +class SimpleKey: + # See below simple keys treatment. + + def __init__(self, token_number, required, index, line, column, mark): + self.token_number = token_number + self.required = required + self.index = index + self.line = line + self.column = column + self.mark = mark + +class Scanner: + + def __init__(self): + """Initialize the scanner.""" + # It is assumed that Scanner and Reader will have a common descendant. + # Reader do the dirty work of checking for BOM and converting the + # input data to Unicode. It also adds NUL to the end. + # + # Reader supports the following methods + # self.peek(i=0) # peek the next i-th character + # self.prefix(l=1) # peek the next l characters + # self.forward(l=1) # read the next l characters and move the pointer. 
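The `add_implicit_resolver` registrations above are what give untagged plain scalars their types: the scanner below only produces SCALAR tokens, and the resolver then picks a tag by matching the value against these YAML 1.1 regular expressions. A brief illustration with the upstream `yaml` package, assuming the vendored copy matches it:

    import yaml

    # Plain scalars are matched against the regexps registered above.
    print(yaml.safe_load('[yes, off, 3, 3.5, ~, 2020-01-02, plain]'))
    # -> [True, False, 3, 3.5, None, datetime.date(2020, 1, 2), 'plain']

    # Quoted scalars are not "plain", so implicit resolution is skipped and
    # Resolver.resolve() falls back to the default str tag.
    print(yaml.safe_load('"yes"'))  # -> 'yes'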
+ + # Had we reached the end of the stream? + self.done = False + + # The number of unclosed '{' and '['. `flow_level == 0` means block + # context. + self.flow_level = 0 + + # List of processed tokens that are not yet emitted. + self.tokens = [] + + # Add the STREAM-START token. + self.fetch_stream_start() + + # Number of tokens that were emitted through the `get_token` method. + self.tokens_taken = 0 + + # The current indentation level. + self.indent = -1 + + # Past indentation levels. + self.indents = [] + + # Variables related to simple keys treatment. + + # A simple key is a key that is not denoted by the '?' indicator. + # Example of simple keys: + # --- + # block simple key: value + # ? not a simple key: + # : { flow simple key: value } + # We emit the KEY token before all keys, so when we find a potential + # simple key, we try to locate the corresponding ':' indicator. + # Simple keys should be limited to a single line and 1024 characters. + + # Can a simple key start at the current position? A simple key may + # start: + # - at the beginning of the line, not counting indentation spaces + # (in block context), + # - after '{', '[', ',' (in the flow context), + # - after '?', ':', '-' (in the block context). + # In the block context, this flag also signifies if a block collection + # may start at the current position. + self.allow_simple_key = True + + # Keep track of possible simple keys. This is a dictionary. The key + # is `flow_level`; there can be no more that one possible simple key + # for each level. The value is a SimpleKey record: + # (token_number, required, index, line, column, mark) + # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), + # '[', or '{' tokens. + self.possible_simple_keys = {} + + # Public methods. + + def check_token(self, *choices): + # Check if the next token is one of the given types. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + if not choices: + return True + for choice in choices: + if isinstance(self.tokens[0], choice): + return True + return False + + def peek_token(self): + # Return the next token, but do not delete if from the queue. + # Return None if no more tokens. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + return self.tokens[0] + else: + return None + + def get_token(self): + # Return the next token. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + self.tokens_taken += 1 + return self.tokens.pop(0) + + # Private methods. + + def need_more_tokens(self): + if self.done: + return False + if not self.tokens: + return True + # The current token may be a potential simple key, so we + # need to look further. + self.stale_possible_simple_keys() + if self.next_possible_simple_key() == self.tokens_taken: + return True + + def fetch_more_tokens(self): + + # Eat whitespaces and comments until we reach the next token. + self.scan_to_next_token() + + # Remove obsolete possible simple keys. + self.stale_possible_simple_keys() + + # Compare the current indentation and column. It may add some tokens + # and decrease the current indentation level. + self.unwind_indent(self.column) + + # Peek the next character. + ch = self.peek() + + # Is it the end of stream? + if ch == '\0': + return self.fetch_stream_end() + + # Is it a directive? + if ch == '%' and self.check_directive(): + return self.fetch_directive() + + # Is it the document start? 
+ if ch == '-' and self.check_document_start(): + return self.fetch_document_start() + + # Is it the document end? + if ch == '.' and self.check_document_end(): + return self.fetch_document_end() + + # TODO: support for BOM within a stream. + #if ch == '\uFEFF': + # return self.fetch_bom() <-- issue BOMToken + + # Note: the order of the following checks is NOT significant. + + # Is it the flow sequence start indicator? + if ch == '[': + return self.fetch_flow_sequence_start() + + # Is it the flow mapping start indicator? + if ch == '{': + return self.fetch_flow_mapping_start() + + # Is it the flow sequence end indicator? + if ch == ']': + return self.fetch_flow_sequence_end() + + # Is it the flow mapping end indicator? + if ch == '}': + return self.fetch_flow_mapping_end() + + # Is it the flow entry indicator? + if ch == ',': + return self.fetch_flow_entry() + + # Is it the block entry indicator? + if ch == '-' and self.check_block_entry(): + return self.fetch_block_entry() + + # Is it the key indicator? + if ch == '?' and self.check_key(): + return self.fetch_key() + + # Is it the value indicator? + if ch == ':' and self.check_value(): + return self.fetch_value() + + # Is it an alias? + if ch == '*': + return self.fetch_alias() + + # Is it an anchor? + if ch == '&': + return self.fetch_anchor() + + # Is it a tag? + if ch == '!': + return self.fetch_tag() + + # Is it a literal scalar? + if ch == '|' and not self.flow_level: + return self.fetch_literal() + + # Is it a folded scalar? + if ch == '>' and not self.flow_level: + return self.fetch_folded() + + # Is it a single quoted scalar? + if ch == '\'': + return self.fetch_single() + + # Is it a double quoted scalar? + if ch == '\"': + return self.fetch_double() + + # It must be a plain scalar then. + if self.check_plain(): + return self.fetch_plain() + + # No? It's an error. Let's produce a nice error message. + raise ScannerError("while scanning for the next token", None, + "found character %r that cannot start any token" % ch, + self.get_mark()) + + # Simple keys treatment. + + def next_possible_simple_key(self): + # Return the number of the nearest possible simple key. Actually we + # don't need to loop through the whole dictionary. We may replace it + # with the following code: + # if not self.possible_simple_keys: + # return None + # return self.possible_simple_keys[ + # min(self.possible_simple_keys.keys())].token_number + min_token_number = None + for level in self.possible_simple_keys: + key = self.possible_simple_keys[level] + if min_token_number is None or key.token_number < min_token_number: + min_token_number = key.token_number + return min_token_number + + def stale_possible_simple_keys(self): + # Remove entries that are no longer possible simple keys. According to + # the YAML specification, simple keys + # - should be limited to a single line, + # - should be no longer than 1024 characters. + # Disabling this procedure will allow simple keys of any length and + # height (may cause problems if indentation is broken though). + for level in list(self.possible_simple_keys): + key = self.possible_simple_keys[level] + if key.line != self.line \ + or self.index-key.index > 1024: + if key.required: + raise ScannerError("while scanning a simple key", key.mark, + "could not find expected ':'", self.get_mark()) + del self.possible_simple_keys[level] + + def save_possible_simple_key(self): + # The next token may start a simple key. We check if it's possible + # and save its position. 
This function is called for + # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. + + # Check if a simple key is required at the current position. + required = not self.flow_level and self.indent == self.column + + # The next token might be a simple key. Let's save it's number and + # position. + if self.allow_simple_key: + self.remove_possible_simple_key() + token_number = self.tokens_taken+len(self.tokens) + key = SimpleKey(token_number, required, + self.index, self.line, self.column, self.get_mark()) + self.possible_simple_keys[self.flow_level] = key + + def remove_possible_simple_key(self): + # Remove the saved possible key position at the current flow level. + if self.flow_level in self.possible_simple_keys: + key = self.possible_simple_keys[self.flow_level] + + if key.required: + raise ScannerError("while scanning a simple key", key.mark, + "could not find expected ':'", self.get_mark()) + + del self.possible_simple_keys[self.flow_level] + + # Indentation functions. + + def unwind_indent(self, column): + + ## In flow context, tokens should respect indentation. + ## Actually the condition should be `self.indent >= column` according to + ## the spec. But this condition will prohibit intuitively correct + ## constructions such as + ## key : { + ## } + #if self.flow_level and self.indent > column: + # raise ScannerError(None, None, + # "invalid indentation or unclosed '[' or '{'", + # self.get_mark()) + + # In the flow context, indentation is ignored. We make the scanner less + # restrictive then specification requires. + if self.flow_level: + return + + # In block context, we may need to issue the BLOCK-END tokens. + while self.indent > column: + mark = self.get_mark() + self.indent = self.indents.pop() + self.tokens.append(BlockEndToken(mark, mark)) + + def add_indent(self, column): + # Check if we need to increase indentation. + if self.indent < column: + self.indents.append(self.indent) + self.indent = column + return True + return False + + # Fetchers. + + def fetch_stream_start(self): + # We always add STREAM-START as the first token and STREAM-END as the + # last token. + + # Read the token. + mark = self.get_mark() + + # Add STREAM-START. + self.tokens.append(StreamStartToken(mark, mark, + encoding=self.encoding)) + + + def fetch_stream_end(self): + + # Set the current indentation to -1. + self.unwind_indent(-1) + + # Reset simple keys. + self.remove_possible_simple_key() + self.allow_simple_key = False + self.possible_simple_keys = {} + + # Read the token. + mark = self.get_mark() + + # Add STREAM-END. + self.tokens.append(StreamEndToken(mark, mark)) + + # The steam is finished. + self.done = True + + def fetch_directive(self): + + # Set the current indentation to -1. + self.unwind_indent(-1) + + # Reset simple keys. + self.remove_possible_simple_key() + self.allow_simple_key = False + + # Scan and add DIRECTIVE. + self.tokens.append(self.scan_directive()) + + def fetch_document_start(self): + self.fetch_document_indicator(DocumentStartToken) + + def fetch_document_end(self): + self.fetch_document_indicator(DocumentEndToken) + + def fetch_document_indicator(self, TokenClass): + + # Set the current indentation to -1. + self.unwind_indent(-1) + + # Reset simple keys. Note that there could not be a block collection + # after '---'. + self.remove_possible_simple_key() + self.allow_simple_key = False + + # Add DOCUMENT-START or DOCUMENT-END. 
+ start_mark = self.get_mark() + self.forward(3) + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_sequence_start(self): + self.fetch_flow_collection_start(FlowSequenceStartToken) + + def fetch_flow_mapping_start(self): + self.fetch_flow_collection_start(FlowMappingStartToken) + + def fetch_flow_collection_start(self, TokenClass): + + # '[' and '{' may start a simple key. + self.save_possible_simple_key() + + # Increase the flow level. + self.flow_level += 1 + + # Simple keys are allowed after '[' and '{'. + self.allow_simple_key = True + + # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_sequence_end(self): + self.fetch_flow_collection_end(FlowSequenceEndToken) + + def fetch_flow_mapping_end(self): + self.fetch_flow_collection_end(FlowMappingEndToken) + + def fetch_flow_collection_end(self, TokenClass): + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Decrease the flow level. + self.flow_level -= 1 + + # No simple keys after ']' or '}'. + self.allow_simple_key = False + + # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_entry(self): + + # Simple keys are allowed after ','. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add FLOW-ENTRY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(FlowEntryToken(start_mark, end_mark)) + + def fetch_block_entry(self): + + # Block context needs additional checks. + if not self.flow_level: + + # Are we allowed to start a new entry? + if not self.allow_simple_key: + raise ScannerError(None, None, + "sequence entries are not allowed here", + self.get_mark()) + + # We may need to add BLOCK-SEQUENCE-START. + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockSequenceStartToken(mark, mark)) + + # It's an error for the block entry to occur in the flow context, + # but we let the parser detect this. + else: + pass + + # Simple keys are allowed after '-'. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add BLOCK-ENTRY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(BlockEntryToken(start_mark, end_mark)) + + def fetch_key(self): + + # Block context needs additional checks. + if not self.flow_level: + + # Are we allowed to start a key (not necessary a simple)? + if not self.allow_simple_key: + raise ScannerError(None, None, + "mapping keys are not allowed here", + self.get_mark()) + + # We may need to add BLOCK-MAPPING-START. + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockMappingStartToken(mark, mark)) + + # Simple keys are allowed after '?' in the block context. + self.allow_simple_key = not self.flow_level + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add KEY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(KeyToken(start_mark, end_mark)) + + def fetch_value(self): + + # Do we determine a simple key? 
+ if self.flow_level in self.possible_simple_keys: + + # Add KEY. + key = self.possible_simple_keys[self.flow_level] + del self.possible_simple_keys[self.flow_level] + self.tokens.insert(key.token_number-self.tokens_taken, + KeyToken(key.mark, key.mark)) + + # If this key starts a new block mapping, we need to add + # BLOCK-MAPPING-START. + if not self.flow_level: + if self.add_indent(key.column): + self.tokens.insert(key.token_number-self.tokens_taken, + BlockMappingStartToken(key.mark, key.mark)) + + # There cannot be two simple keys one after another. + self.allow_simple_key = False + + # It must be a part of a complex key. + else: + + # Block context needs additional checks. + # (Do we really need them? They will be caught by the parser + # anyway.) + if not self.flow_level: + + # We are allowed to start a complex value if and only if + # we can start a simple key. + if not self.allow_simple_key: + raise ScannerError(None, None, + "mapping values are not allowed here", + self.get_mark()) + + # If this value starts a new block mapping, we need to add + # BLOCK-MAPPING-START. It will be detected as an error later by + # the parser. + if not self.flow_level: + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockMappingStartToken(mark, mark)) + + # Simple keys are allowed after ':' in the block context. + self.allow_simple_key = not self.flow_level + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add VALUE. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(ValueToken(start_mark, end_mark)) + + def fetch_alias(self): + + # ALIAS could be a simple key. + self.save_possible_simple_key() + + # No simple keys after ALIAS. + self.allow_simple_key = False + + # Scan and add ALIAS. + self.tokens.append(self.scan_anchor(AliasToken)) + + def fetch_anchor(self): + + # ANCHOR could start a simple key. + self.save_possible_simple_key() + + # No simple keys after ANCHOR. + self.allow_simple_key = False + + # Scan and add ANCHOR. + self.tokens.append(self.scan_anchor(AnchorToken)) + + def fetch_tag(self): + + # TAG could start a simple key. + self.save_possible_simple_key() + + # No simple keys after TAG. + self.allow_simple_key = False + + # Scan and add TAG. + self.tokens.append(self.scan_tag()) + + def fetch_literal(self): + self.fetch_block_scalar(style='|') + + def fetch_folded(self): + self.fetch_block_scalar(style='>') + + def fetch_block_scalar(self, style): + + # A simple key may follow a block scalar. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Scan and add SCALAR. + self.tokens.append(self.scan_block_scalar(style)) + + def fetch_single(self): + self.fetch_flow_scalar(style='\'') + + def fetch_double(self): + self.fetch_flow_scalar(style='"') + + def fetch_flow_scalar(self, style): + + # A flow scalar could be a simple key. + self.save_possible_simple_key() + + # No simple keys after flow scalars. + self.allow_simple_key = False + + # Scan and add SCALAR. + self.tokens.append(self.scan_flow_scalar(style)) + + def fetch_plain(self): + + # A plain scalar could be a simple key. + self.save_possible_simple_key() + + # No simple keys after plain scalars. But note that `scan_plain` will + # change this flag if the scan is finished at the beginning of the + # line. + self.allow_simple_key = False + + # Scan and add SCALAR. May change `allow_simple_key`. 
+ self.tokens.append(self.scan_plain()) + + # Checkers. + + def check_directive(self): + + # DIRECTIVE: ^ '%' ... + # The '%' indicator is already checked. + if self.column == 0: + return True + + def check_document_start(self): + + # DOCUMENT-START: ^ '---' (' '|'\n') + if self.column == 0: + if self.prefix(3) == '---' \ + and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': + return True + + def check_document_end(self): + + # DOCUMENT-END: ^ '...' (' '|'\n') + if self.column == 0: + if self.prefix(3) == '...' \ + and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': + return True + + def check_block_entry(self): + + # BLOCK-ENTRY: '-' (' '|'\n') + return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' + + def check_key(self): + + # KEY(flow context): '?' + if self.flow_level: + return True + + # KEY(block context): '?' (' '|'\n') + else: + return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' + + def check_value(self): + + # VALUE(flow context): ':' + if self.flow_level: + return True + + # VALUE(block context): ':' (' '|'\n') + else: + return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' + + def check_plain(self): + + # A plain scalar may start with any non-space character except: + # '-', '?', ':', ',', '[', ']', '{', '}', + # '#', '&', '*', '!', '|', '>', '\'', '\"', + # '%', '@', '`'. + # + # It may also start with + # '-', '?', ':' + # if it is followed by a non-space character. + # + # Note that we limit the last rule to the block context (except the + # '-' character) because we want the flow context to be space + # independent. + ch = self.peek() + return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \ + or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029' + and (ch == '-' or (not self.flow_level and ch in '?:'))) + + # Scanners. + + def scan_to_next_token(self): + # We ignore spaces, line breaks and comments. + # If we find a line break in the block context, we set the flag + # `allow_simple_key` on. + # The byte order mark is stripped if it's the first character in the + # stream. We do not yet support BOM inside the stream as the + # specification requires. Any such mark will be considered as a part + # of the document. + # + # TODO: We need to make tab handling rules more sane. A good rule is + # Tabs cannot precede tokens + # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, + # KEY(block), VALUE(block), BLOCK-ENTRY + # So the checking code is + # if : + # self.allow_simple_keys = False + # We also need to add the check for `allow_simple_keys == True` to + # `unwind_indent` before issuing BLOCK-END. + # Scanners for block, flow, and plain scalars need to be modified. + + if self.index == 0 and self.peek() == '\uFEFF': + self.forward() + found = False + while not found: + while self.peek() == ' ': + self.forward() + if self.peek() == '#': + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + if self.scan_line_break(): + if not self.flow_level: + self.allow_simple_key = True + else: + found = True + + def scan_directive(self): + # See the specification for details. 
+ start_mark = self.get_mark() + self.forward() + name = self.scan_directive_name(start_mark) + value = None + if name == 'YAML': + value = self.scan_yaml_directive_value(start_mark) + end_mark = self.get_mark() + elif name == 'TAG': + value = self.scan_tag_directive_value(start_mark) + end_mark = self.get_mark() + else: + end_mark = self.get_mark() + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + self.scan_directive_ignored_line(start_mark) + return DirectiveToken(name, value, start_mark, end_mark) + + def scan_directive_name(self, start_mark): + # See the specification for details. + length = 0 + ch = self.peek(length) + while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_': + length += 1 + ch = self.peek(length) + if not length: + raise ScannerError("while scanning a directive", start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + value = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + return value + + def scan_yaml_directive_value(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + major = self.scan_yaml_directive_number(start_mark) + if self.peek() != '.': + raise ScannerError("while scanning a directive", start_mark, + "expected a digit or '.', but found %r" % self.peek(), + self.get_mark()) + self.forward() + minor = self.scan_yaml_directive_number(start_mark) + if self.peek() not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected a digit or ' ', but found %r" % self.peek(), + self.get_mark()) + return (major, minor) + + def scan_yaml_directive_number(self, start_mark): + # See the specification for details. + ch = self.peek() + if not ('0' <= ch <= '9'): + raise ScannerError("while scanning a directive", start_mark, + "expected a digit, but found %r" % ch, self.get_mark()) + length = 0 + while '0' <= self.peek(length) <= '9': + length += 1 + value = int(self.prefix(length)) + self.forward(length) + return value + + def scan_tag_directive_value(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + handle = self.scan_tag_directive_handle(start_mark) + while self.peek() == ' ': + self.forward() + prefix = self.scan_tag_directive_prefix(start_mark) + return (handle, prefix) + + def scan_tag_directive_handle(self, start_mark): + # See the specification for details. + value = self.scan_tag_handle('directive', start_mark) + ch = self.peek() + if ch != ' ': + raise ScannerError("while scanning a directive", start_mark, + "expected ' ', but found %r" % ch, self.get_mark()) + return value + + def scan_tag_directive_prefix(self, start_mark): + # See the specification for details. + value = self.scan_tag_uri('directive', start_mark) + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected ' ', but found %r" % ch, self.get_mark()) + return value + + def scan_directive_ignored_line(self, start_mark): + # See the specification for details. 
+ while self.peek() == ' ': + self.forward() + if self.peek() == '#': + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + ch = self.peek() + if ch not in '\0\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected a comment or a line break, but found %r" + % ch, self.get_mark()) + self.scan_line_break() + + def scan_anchor(self, TokenClass): + # The specification does not restrict characters for anchors and + # aliases. This may lead to problems, for instance, the document: + # [ *alias, value ] + # can be interpreted in two ways, as + # [ "value" ] + # and + # [ *alias , "value" ] + # Therefore we restrict aliases to numbers and ASCII letters. + start_mark = self.get_mark() + indicator = self.peek() + if indicator == '*': + name = 'alias' + else: + name = 'anchor' + self.forward() + length = 0 + ch = self.peek(length) + while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_': + length += 1 + ch = self.peek(length) + if not length: + raise ScannerError("while scanning an %s" % name, start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + value = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`': + raise ScannerError("while scanning an %s" % name, start_mark, + "expected alphabetic or numeric character, but found %r" + % ch, self.get_mark()) + end_mark = self.get_mark() + return TokenClass(value, start_mark, end_mark) + + def scan_tag(self): + # See the specification for details. + start_mark = self.get_mark() + ch = self.peek(1) + if ch == '<': + handle = None + self.forward(2) + suffix = self.scan_tag_uri('tag', start_mark) + if self.peek() != '>': + raise ScannerError("while parsing a tag", start_mark, + "expected '>', but found %r" % self.peek(), + self.get_mark()) + self.forward() + elif ch in '\0 \t\r\n\x85\u2028\u2029': + handle = None + suffix = '!' + self.forward() + else: + length = 1 + use_handle = False + while ch not in '\0 \r\n\x85\u2028\u2029': + if ch == '!': + use_handle = True + break + length += 1 + ch = self.peek(length) + handle = '!' + if use_handle: + handle = self.scan_tag_handle('tag', start_mark) + else: + handle = '!' + self.forward() + suffix = self.scan_tag_uri('tag', start_mark) + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a tag", start_mark, + "expected ' ', but found %r" % ch, self.get_mark()) + value = (handle, suffix) + end_mark = self.get_mark() + return TagToken(value, start_mark, end_mark) + + def scan_block_scalar(self, style): + # See the specification for details. + + if style == '>': + folded = True + else: + folded = False + + chunks = [] + start_mark = self.get_mark() + + # Scan the header. + self.forward() + chomping, increment = self.scan_block_scalar_indicators(start_mark) + self.scan_block_scalar_ignored_line(start_mark) + + # Determine the indentation level and go to the first non-empty line. + min_indent = self.indent+1 + if min_indent < 1: + min_indent = 1 + if increment is None: + breaks, max_indent, end_mark = self.scan_block_scalar_indentation() + indent = max(min_indent, max_indent) + else: + indent = min_indent+increment-1 + breaks, end_mark = self.scan_block_scalar_breaks(indent) + line_break = '' + + # Scan the inner part of the block scalar. 
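+        # Each iteration consumes one non-empty line at the scalar's
+        # indentation level.  For folded scalars ('>') a lone '\n' between
+        # two lines that both start with a non-space character becomes a
+        # single space; literal scalars ('|') keep every line break
+        # verbatim.  The loop ends when the indentation drops below `indent`
+        # or the stream is exhausted; chomping ('-' / '+') is then applied
+        # to the trailing breaks.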
+ while self.column == indent and self.peek() != '\0': + chunks.extend(breaks) + leading_non_space = self.peek() not in ' \t' + length = 0 + while self.peek(length) not in '\0\r\n\x85\u2028\u2029': + length += 1 + chunks.append(self.prefix(length)) + self.forward(length) + line_break = self.scan_line_break() + breaks, end_mark = self.scan_block_scalar_breaks(indent) + if self.column == indent and self.peek() != '\0': + + # Unfortunately, folding rules are ambiguous. + # + # This is the folding according to the specification: + + if folded and line_break == '\n' \ + and leading_non_space and self.peek() not in ' \t': + if not breaks: + chunks.append(' ') + else: + chunks.append(line_break) + + # This is Clark Evans's interpretation (also in the spec + # examples): + # + #if folded and line_break == '\n': + # if not breaks: + # if self.peek() not in ' \t': + # chunks.append(' ') + # else: + # chunks.append(line_break) + #else: + # chunks.append(line_break) + else: + break + + # Chomp the tail. + if chomping is not False: + chunks.append(line_break) + if chomping is True: + chunks.extend(breaks) + + # We are done. + return ScalarToken(''.join(chunks), False, start_mark, end_mark, + style) + + def scan_block_scalar_indicators(self, start_mark): + # See the specification for details. + chomping = None + increment = None + ch = self.peek() + if ch in '+-': + if ch == '+': + chomping = True + else: + chomping = False + self.forward() + ch = self.peek() + if ch in '0123456789': + increment = int(ch) + if increment == 0: + raise ScannerError("while scanning a block scalar", start_mark, + "expected indentation indicator in the range 1-9, but found 0", + self.get_mark()) + self.forward() + elif ch in '0123456789': + increment = int(ch) + if increment == 0: + raise ScannerError("while scanning a block scalar", start_mark, + "expected indentation indicator in the range 1-9, but found 0", + self.get_mark()) + self.forward() + ch = self.peek() + if ch in '+-': + if ch == '+': + chomping = True + else: + chomping = False + self.forward() + ch = self.peek() + if ch not in '\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a block scalar", start_mark, + "expected chomping or indentation indicators, but found %r" + % ch, self.get_mark()) + return chomping, increment + + def scan_block_scalar_ignored_line(self, start_mark): + # See the specification for details. + while self.peek() == ' ': + self.forward() + if self.peek() == '#': + while self.peek() not in '\0\r\n\x85\u2028\u2029': + self.forward() + ch = self.peek() + if ch not in '\0\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a block scalar", start_mark, + "expected a comment or a line break, but found %r" % ch, + self.get_mark()) + self.scan_line_break() + + def scan_block_scalar_indentation(self): + # See the specification for details. + chunks = [] + max_indent = 0 + end_mark = self.get_mark() + while self.peek() in ' \r\n\x85\u2028\u2029': + if self.peek() != ' ': + chunks.append(self.scan_line_break()) + end_mark = self.get_mark() + else: + self.forward() + if self.column > max_indent: + max_indent = self.column + return chunks, max_indent, end_mark + + def scan_block_scalar_breaks(self, indent): + # See the specification for details. 
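+        # Skips up to `indent` spaces of indentation and collects the line
+        # breaks of any empty lines, returning the breaks together with an
+        # end mark positioned at the first non-empty line.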
+ chunks = [] + end_mark = self.get_mark() + while self.column < indent and self.peek() == ' ': + self.forward() + while self.peek() in '\r\n\x85\u2028\u2029': + chunks.append(self.scan_line_break()) + end_mark = self.get_mark() + while self.column < indent and self.peek() == ' ': + self.forward() + return chunks, end_mark + + def scan_flow_scalar(self, style): + # See the specification for details. + # Note that we loose indentation rules for quoted scalars. Quoted + # scalars don't need to adhere indentation because " and ' clearly + # mark the beginning and the end of them. Therefore we are less + # restrictive then the specification requires. We only need to check + # that document separators are not included in scalars. + if style == '"': + double = True + else: + double = False + chunks = [] + start_mark = self.get_mark() + quote = self.peek() + self.forward() + chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) + while self.peek() != quote: + chunks.extend(self.scan_flow_scalar_spaces(double, start_mark)) + chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) + self.forward() + end_mark = self.get_mark() + return ScalarToken(''.join(chunks), False, start_mark, end_mark, + style) + + ESCAPE_REPLACEMENTS = { + '0': '\0', + 'a': '\x07', + 'b': '\x08', + 't': '\x09', + '\t': '\x09', + 'n': '\x0A', + 'v': '\x0B', + 'f': '\x0C', + 'r': '\x0D', + 'e': '\x1B', + ' ': '\x20', + '\"': '\"', + '\\': '\\', + '/': '/', + 'N': '\x85', + '_': '\xA0', + 'L': '\u2028', + 'P': '\u2029', + } + + ESCAPE_CODES = { + 'x': 2, + 'u': 4, + 'U': 8, + } + + def scan_flow_scalar_non_spaces(self, double, start_mark): + # See the specification for details. + chunks = [] + while True: + length = 0 + while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029': + length += 1 + if length: + chunks.append(self.prefix(length)) + self.forward(length) + ch = self.peek() + if not double and ch == '\'' and self.peek(1) == '\'': + chunks.append('\'') + self.forward(2) + elif (double and ch == '\'') or (not double and ch in '\"\\'): + chunks.append(ch) + self.forward() + elif double and ch == '\\': + self.forward() + ch = self.peek() + if ch in self.ESCAPE_REPLACEMENTS: + chunks.append(self.ESCAPE_REPLACEMENTS[ch]) + self.forward() + elif ch in self.ESCAPE_CODES: + length = self.ESCAPE_CODES[ch] + self.forward() + for k in range(length): + if self.peek(k) not in '0123456789ABCDEFabcdef': + raise ScannerError("while scanning a double-quoted scalar", start_mark, + "expected escape sequence of %d hexdecimal numbers, but found %r" % + (length, self.peek(k)), self.get_mark()) + code = int(self.prefix(length), 16) + chunks.append(chr(code)) + self.forward(length) + elif ch in '\r\n\x85\u2028\u2029': + self.scan_line_break() + chunks.extend(self.scan_flow_scalar_breaks(double, start_mark)) + else: + raise ScannerError("while scanning a double-quoted scalar", start_mark, + "found unknown escape character %r" % ch, self.get_mark()) + else: + return chunks + + def scan_flow_scalar_spaces(self, double, start_mark): + # See the specification for details. 
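+        # Handles runs of spaces and tabs inside a quoted scalar.  Whitespace
+        # that sits directly before a line break is dropped; a single '\n'
+        # folds into one space, while additional blank lines keep their
+        # breaks.  Whitespace not followed by a break is kept verbatim.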
+ chunks = [] + length = 0 + while self.peek(length) in ' \t': + length += 1 + whitespaces = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch == '\0': + raise ScannerError("while scanning a quoted scalar", start_mark, + "found unexpected end of stream", self.get_mark()) + elif ch in '\r\n\x85\u2028\u2029': + line_break = self.scan_line_break() + breaks = self.scan_flow_scalar_breaks(double, start_mark) + if line_break != '\n': + chunks.append(line_break) + elif not breaks: + chunks.append(' ') + chunks.extend(breaks) + else: + chunks.append(whitespaces) + return chunks + + def scan_flow_scalar_breaks(self, double, start_mark): + # See the specification for details. + chunks = [] + while True: + # Instead of checking indentation, we check for document + # separators. + prefix = self.prefix(3) + if (prefix == '---' or prefix == '...') \ + and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a quoted scalar", start_mark, + "found unexpected document separator", self.get_mark()) + while self.peek() in ' \t': + self.forward() + if self.peek() in '\r\n\x85\u2028\u2029': + chunks.append(self.scan_line_break()) + else: + return chunks + + def scan_plain(self): + # See the specification for details. + # We add an additional restriction for the flow context: + # plain scalars in the flow context cannot contain ',' or '?'. + # We also keep track of the `allow_simple_key` flag here. + # Indentation rules are loosed for the flow context. + chunks = [] + start_mark = self.get_mark() + end_mark = start_mark + indent = self.indent+1 + # We allow zero indentation for scalars, but then we need to check for + # document separators at the beginning of the line. + #if indent == 0: + # indent = 1 + spaces = [] + while True: + length = 0 + if self.peek() == '#': + break + while True: + ch = self.peek(length) + if ch in '\0 \t\r\n\x85\u2028\u2029' \ + or (ch == ':' and + self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029' + + (u',[]{}' if self.flow_level else u''))\ + or (self.flow_level and ch in ',?[]{}'): + break + length += 1 + if length == 0: + break + self.allow_simple_key = False + chunks.extend(spaces) + chunks.append(self.prefix(length)) + self.forward(length) + end_mark = self.get_mark() + spaces = self.scan_plain_spaces(indent, start_mark) + if not spaces or self.peek() == '#' \ + or (not self.flow_level and self.column < indent): + break + return ScalarToken(''.join(chunks), True, start_mark, end_mark) + + def scan_plain_spaces(self, indent, start_mark): + # See the specification for details. + # The specification is really confusing about tabs in plain scalars. + # We just forbid them completely. Do not use tabs in YAML! 
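+        # Consumes the spaces and line breaks that may follow a plain-scalar
+        # chunk.  A line break re-enables simple keys, and a '---' or '...'
+        # document separator on the following line terminates the scalar
+        # (None is returned).  Otherwise the collected breaks are folded in
+        # the same way as for flow scalars.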
+ chunks = [] + length = 0 + while self.peek(length) in ' ': + length += 1 + whitespaces = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch in '\r\n\x85\u2028\u2029': + line_break = self.scan_line_break() + self.allow_simple_key = True + prefix = self.prefix(3) + if (prefix == '---' or prefix == '...') \ + and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': + return + breaks = [] + while self.peek() in ' \r\n\x85\u2028\u2029': + if self.peek() == ' ': + self.forward() + else: + breaks.append(self.scan_line_break()) + prefix = self.prefix(3) + if (prefix == '---' or prefix == '...') \ + and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': + return + if line_break != '\n': + chunks.append(line_break) + elif not breaks: + chunks.append(' ') + chunks.extend(breaks) + elif whitespaces: + chunks.append(whitespaces) + return chunks + + def scan_tag_handle(self, name, start_mark): + # See the specification for details. + # For some strange reasons, the specification does not allow '_' in + # tag handles. I have allowed it anyway. + ch = self.peek() + if ch != '!': + raise ScannerError("while scanning a %s" % name, start_mark, + "expected '!', but found %r" % ch, self.get_mark()) + length = 1 + ch = self.peek(length) + if ch != ' ': + while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-_': + length += 1 + ch = self.peek(length) + if ch != '!': + self.forward(length) + raise ScannerError("while scanning a %s" % name, start_mark, + "expected '!', but found %r" % ch, self.get_mark()) + length += 1 + value = self.prefix(length) + self.forward(length) + return value + + def scan_tag_uri(self, name, start_mark): + # See the specification for details. + # Note: we do not check if URI is well-formed. + chunks = [] + length = 0 + ch = self.peek(length) + while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ + or ch in '-;/?:@&=+$,_.!~*\'()[]%': + if ch == '%': + chunks.append(self.prefix(length)) + self.forward(length) + length = 0 + chunks.append(self.scan_uri_escapes(name, start_mark)) + else: + length += 1 + ch = self.peek(length) + if length: + chunks.append(self.prefix(length)) + self.forward(length) + length = 0 + if not chunks: + raise ScannerError("while parsing a %s" % name, start_mark, + "expected URI, but found %r" % ch, self.get_mark()) + return ''.join(chunks) + + def scan_uri_escapes(self, name, start_mark): + # See the specification for details. 
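+        # Decodes one or more '%xx' escape sequences in a tag URI into their
+        # byte values and returns the UTF-8 decoded result, e.g. '%C3%A9'
+        # becomes 'é'.  Invalid hex digits or invalid UTF-8 raise a
+        # ScannerError.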
+ codes = [] + mark = self.get_mark() + while self.peek() == '%': + self.forward() + for k in range(2): + if self.peek(k) not in '0123456789ABCDEFabcdef': + raise ScannerError("while scanning a %s" % name, start_mark, + "expected URI escape sequence of 2 hexdecimal numbers, but found %r" + % self.peek(k), self.get_mark()) + codes.append(int(self.prefix(2), 16)) + self.forward(2) + try: + value = bytes(codes).decode('utf-8') + except UnicodeDecodeError as exc: + raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark) + return value + + def scan_line_break(self): + # Transforms: + # '\r\n' : '\n' + # '\r' : '\n' + # '\n' : '\n' + # '\x85' : '\n' + # '\u2028' : '\u2028' + # '\u2029 : '\u2029' + # default : '' + ch = self.peek() + if ch in '\r\n\x85': + if self.prefix(2) == '\r\n': + self.forward(2) + else: + self.forward() + return '\n' + elif ch in '\u2028\u2029': + self.forward() + return ch + return '' diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/serializer.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/serializer.py new file mode 100644 index 0000000..fe911e6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/serializer.py @@ -0,0 +1,111 @@ + +__all__ = ['Serializer', 'SerializerError'] + +from .error import YAMLError +from .events import * +from .nodes import * + +class SerializerError(YAMLError): + pass + +class Serializer: + + ANCHOR_TEMPLATE = 'id%03d' + + def __init__(self, encoding=None, + explicit_start=None, explicit_end=None, version=None, tags=None): + self.use_encoding = encoding + self.use_explicit_start = explicit_start + self.use_explicit_end = explicit_end + self.use_version = version + self.use_tags = tags + self.serialized_nodes = {} + self.anchors = {} + self.last_anchor_id = 0 + self.closed = None + + def open(self): + if self.closed is None: + self.emit(StreamStartEvent(encoding=self.use_encoding)) + self.closed = False + elif self.closed: + raise SerializerError("serializer is closed") + else: + raise SerializerError("serializer is already opened") + + def close(self): + if self.closed is None: + raise SerializerError("serializer is not opened") + elif not self.closed: + self.emit(StreamEndEvent()) + self.closed = True + + #def __del__(self): + # self.close() + + def serialize(self, node): + if self.closed is None: + raise SerializerError("serializer is not opened") + elif self.closed: + raise SerializerError("serializer is closed") + self.emit(DocumentStartEvent(explicit=self.use_explicit_start, + version=self.use_version, tags=self.use_tags)) + self.anchor_node(node) + self.serialize_node(node, None, None) + self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) + self.serialized_nodes = {} + self.anchors = {} + self.last_anchor_id = 0 + + def anchor_node(self, node): + if node in self.anchors: + if self.anchors[node] is None: + self.anchors[node] = self.generate_anchor(node) + else: + self.anchors[node] = None + if isinstance(node, SequenceNode): + for item in node.value: + self.anchor_node(item) + elif isinstance(node, MappingNode): + for key, value in node.value: + self.anchor_node(key) + self.anchor_node(value) + + def generate_anchor(self, node): + self.last_anchor_id += 1 + return self.ANCHOR_TEMPLATE % self.last_anchor_id + + def serialize_node(self, node, parent, index): + alias = self.anchors[node] + if node in self.serialized_nodes: + self.emit(AliasEvent(alias)) + else: + self.serialized_nodes[node] = True + self.descend_resolver(parent, index) + if isinstance(node, 
ScalarNode): + detected_tag = self.resolve(ScalarNode, node.value, (True, False)) + default_tag = self.resolve(ScalarNode, node.value, (False, True)) + implicit = (node.tag == detected_tag), (node.tag == default_tag) + self.emit(ScalarEvent(alias, node.tag, implicit, node.value, + style=node.style)) + elif isinstance(node, SequenceNode): + implicit = (node.tag + == self.resolve(SequenceNode, node.value, True)) + self.emit(SequenceStartEvent(alias, node.tag, implicit, + flow_style=node.flow_style)) + index = 0 + for item in node.value: + self.serialize_node(item, node, index) + index += 1 + self.emit(SequenceEndEvent()) + elif isinstance(node, MappingNode): + implicit = (node.tag + == self.resolve(MappingNode, node.value, True)) + self.emit(MappingStartEvent(alias, node.tag, implicit, + flow_style=node.flow_style)) + for key, value in node.value: + self.serialize_node(key, node, None) + self.serialize_node(value, node, key) + self.emit(MappingEndEvent()) + self.ascend_resolver() + diff --git a/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/tokens.py b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/tokens.py new file mode 100644 index 0000000..4d0b48a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/vendor/yaml/tokens.py @@ -0,0 +1,104 @@ + +class Token(object): + def __init__(self, start_mark, end_mark): + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + attributes = [key for key in self.__dict__ + if not key.endswith('_mark')] + attributes.sort() + arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) + for key in attributes]) + return '%s(%s)' % (self.__class__.__name__, arguments) + +#class BOMToken(Token): +# id = '' + +class DirectiveToken(Token): + id = '' + def __init__(self, name, value, start_mark, end_mark): + self.name = name + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + +class DocumentStartToken(Token): + id = '' + +class DocumentEndToken(Token): + id = '' + +class StreamStartToken(Token): + id = '' + def __init__(self, start_mark=None, end_mark=None, + encoding=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.encoding = encoding + +class StreamEndToken(Token): + id = '' + +class BlockSequenceStartToken(Token): + id = '' + +class BlockMappingStartToken(Token): + id = '' + +class BlockEndToken(Token): + id = '' + +class FlowSequenceStartToken(Token): + id = '[' + +class FlowMappingStartToken(Token): + id = '{' + +class FlowSequenceEndToken(Token): + id = ']' + +class FlowMappingEndToken(Token): + id = '}' + +class KeyToken(Token): + id = '?' 
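+# Illustrative example (not part of this module): scanning the document
+# "a: 1" with PyYAML's top-level scan() helper yields, in order,
+# StreamStartToken, BlockMappingStartToken, KeyToken,
+# ScalarToken(value='a', plain=True), ValueToken,
+# ScalarToken(value='1', plain=True), BlockEndToken and StreamEndToken.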
+ +class ValueToken(Token): + id = ':' + +class BlockEntryToken(Token): + id = '-' + +class FlowEntryToken(Token): + id = ',' + +class AliasToken(Token): + id = '' + def __init__(self, value, start_mark, end_mark): + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + +class AnchorToken(Token): + id = '' + def __init__(self, value, start_mark, end_mark): + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + +class TagToken(Token): + id = '' + def __init__(self, value, start_mark, end_mark): + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + +class ScalarToken(Token): + id = '' + def __init__(self, value, plain, start_mark, end_mark, style=None): + self.value = value + self.plain = plain + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + diff --git a/.venv/lib/python3.9/site-packages/invoke/watchers.py b/.venv/lib/python3.9/site-packages/invoke/watchers.py new file mode 100644 index 0000000..eb813df --- /dev/null +++ b/.venv/lib/python3.9/site-packages/invoke/watchers.py @@ -0,0 +1,145 @@ +import re +import threading +from typing import Generator, Iterable + +from .exceptions import ResponseNotAccepted + + +class StreamWatcher(threading.local): + """ + A class whose subclasses may act on seen stream data from subprocesses. + + Subclasses must exhibit the following API; see `Responder` for a concrete + example. + + * ``__init__`` is completely up to each subclass, though as usual, + subclasses *of* subclasses should be careful to make use of `super` where + appropriate. + * `submit` must accept the entire current contents of the stream being + watched, as a string, and may optionally return an iterable of strings + (or act as a generator iterator, i.e. multiple calls to ``yield + ``), which will each be written to the subprocess' standard + input. + + .. note:: + `StreamWatcher` subclasses exist in part to enable state tracking, such + as detecting when a submitted password didn't work & erroring (or + prompting a user, or etc). Such bookkeeping isn't easily achievable + with simple callback functions. + + .. note:: + `StreamWatcher` subclasses `threading.local` so that its instances can + be used to 'watch' both subprocess stdout and stderr in separate + threads. + + .. versionadded:: 1.0 + """ + + def submit(self, stream: str) -> Iterable[str]: + """ + Act on ``stream`` data, potentially returning responses. + + :param str stream: + All data read on this stream since the beginning of the session. + + :returns: + An iterable of ``str`` (which may be empty). + + .. versionadded:: 1.0 + """ + raise NotImplementedError + + +class Responder(StreamWatcher): + """ + A parameterizable object that submits responses to specific patterns. + + Commonly used to implement password auto-responds for things like ``sudo``. + + .. versionadded:: 1.0 + """ + + def __init__(self, pattern: str, response: str) -> None: + r""" + Imprint this `Responder` with necessary parameters. + + :param pattern: + A raw string (e.g. ``r"\[sudo\] password for .*:"``) which will be + turned into a regular expression. + + :param response: + The string to submit to the subprocess' stdin when ``pattern`` is + detected. + """ + # TODO: precompile the keys into regex objects + self.pattern = pattern + self.response = response + self.index = 0 + + def pattern_matches( + self, stream: str, pattern: str, index_attr: str + ) -> Iterable[str]: + """ + Generic "search for pattern in stream, using index" behavior. 
+ + Used here and in some subclasses that want to track multiple patterns + concurrently. + + :param str stream: The same data passed to ``submit``. + :param str pattern: The pattern to search for. + :param str index_attr: The name of the index attribute to use. + :returns: An iterable of string matches. + + .. versionadded:: 1.0 + """ + # NOTE: generifies scanning so it can be used to scan for >1 pattern at + # once, e.g. in FailingResponder. + # Only look at stream contents we haven't seen yet, to avoid dupes. + index = getattr(self, index_attr) + new = stream[index:] + # Search, across lines if necessary + matches = re.findall(pattern, new, re.S) + # Update seek index if we've matched + if matches: + setattr(self, index_attr, index + len(new)) + return matches + + def submit(self, stream: str) -> Generator[str, None, None]: + # Iterate over findall() response in case >1 match occurred. + for _ in self.pattern_matches(stream, self.pattern, "index"): + yield self.response + + +class FailingResponder(Responder): + """ + Variant of `Responder` which is capable of detecting incorrect responses. + + This class adds a ``sentinel`` parameter to ``__init__``, and its + ``submit`` will raise `.ResponseNotAccepted` if it detects that sentinel + value in the stream. + + .. versionadded:: 1.0 + """ + + def __init__(self, pattern: str, response: str, sentinel: str) -> None: + super().__init__(pattern, response) + self.sentinel = sentinel + self.failure_index = 0 + self.tried = False + + def submit(self, stream: str) -> Generator[str, None, None]: + # Behave like regular Responder initially + response = super().submit(stream) + # Also check stream for our failure sentinel + failed = self.pattern_matches(stream, self.sentinel, "failure_index") + # Error out if we seem to have failed after a previous response. + if self.tried and failed: + err = 'Auto-response to r"{}" failed with {!r}!'.format( + self.pattern, self.sentinel + ) + raise ResponseNotAccepted(err) + # Once we see that we had a response, take note + if response: + self.tried = True + # Again, behave regularly by default. + return response diff --git a/.venv/lib/python3.9/site-packages/nacl/__init__.py b/.venv/lib/python3.9/site-packages/nacl/__init__.py new file mode 100644 index 0000000..83aaacf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/__init__.py @@ -0,0 +1,25 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
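+# Illustrative usage sketch for the `invoke` watcher classes defined above
+# (the Context object `ctx` and the literal password are assumptions made
+# for the example, not part of this file):
+#
+#     from invoke import Responder
+#
+#     sudopass = Responder(
+#         pattern=r"\[sudo\] password for .*:",  # regex run against all output seen so far
+#         response="mypassword\n",               # written to the subprocess' stdin on a match
+#     )
+#     ctx.run("sudo whoami", pty=True, watchers=[sudopass])
+#
+# FailingResponder works the same way but also takes a `sentinel` string and
+# raises ResponseNotAccepted if that sentinel appears after a response was
+# submitted (e.g. "Sorry, try again." for sudo).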
+ + +__all__ = [ + "__uri__", + "__version__", + "__email__", +] + +__uri__ = "https://github.com/pyca/pynacl/" + +# Must be kept in sync with `pyproject.toml` +__version__ = "1.6.2" diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..1b05224 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/encoding.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/encoding.cpython-39.pyc new file mode 100644 index 0000000..70f5117 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/encoding.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/exceptions.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..e6b386b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/exceptions.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/hash.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/hash.cpython-39.pyc new file mode 100644 index 0000000..49e05fb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/hash.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/hashlib.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/hashlib.cpython-39.pyc new file mode 100644 index 0000000..3f46e74 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/hashlib.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/public.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/public.cpython-39.pyc new file mode 100644 index 0000000..b0b35a0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/public.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/secret.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/secret.cpython-39.pyc new file mode 100644 index 0000000..df78f40 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/secret.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/signing.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/signing.cpython-39.pyc new file mode 100644 index 0000000..794b215 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/signing.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/__pycache__/utils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..1df4dbb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/__pycache__/utils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/_sodium.abi3.so b/.venv/lib/python3.9/site-packages/nacl/_sodium.abi3.so new file mode 100755 index 0000000..8e5581b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/_sodium.abi3.so differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__init__.py b/.venv/lib/python3.9/site-packages/nacl/bindings/__init__.py new file mode 100644 index 0000000..2e07ba1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/__init__.py @@ 
-0,0 +1,508 @@ +# Copyright 2013-2019 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl.bindings.crypto_aead import ( + crypto_aead_aegis128l_ABYTES, + crypto_aead_aegis128l_KEYBYTES, + crypto_aead_aegis128l_MESSAGEBYTES_MAX, + crypto_aead_aegis128l_NPUBBYTES, + crypto_aead_aegis128l_NSECBYTES, + crypto_aead_aegis128l_decrypt, + crypto_aead_aegis128l_encrypt, + crypto_aead_aegis256_ABYTES, + crypto_aead_aegis256_KEYBYTES, + crypto_aead_aegis256_MESSAGEBYTES_MAX, + crypto_aead_aegis256_NPUBBYTES, + crypto_aead_aegis256_NSECBYTES, + crypto_aead_aegis256_decrypt, + crypto_aead_aegis256_encrypt, + crypto_aead_aes256gcm_ABYTES, + crypto_aead_aes256gcm_KEYBYTES, + crypto_aead_aes256gcm_MESSAGEBYTES_MAX, + crypto_aead_aes256gcm_NPUBBYTES, + crypto_aead_aes256gcm_NSECBYTES, + crypto_aead_aes256gcm_decrypt, + crypto_aead_aes256gcm_encrypt, + crypto_aead_chacha20poly1305_ABYTES, + crypto_aead_chacha20poly1305_KEYBYTES, + crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX, + crypto_aead_chacha20poly1305_NPUBBYTES, + crypto_aead_chacha20poly1305_NSECBYTES, + crypto_aead_chacha20poly1305_decrypt, + crypto_aead_chacha20poly1305_encrypt, + crypto_aead_chacha20poly1305_ietf_ABYTES, + crypto_aead_chacha20poly1305_ietf_KEYBYTES, + crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX, + crypto_aead_chacha20poly1305_ietf_NPUBBYTES, + crypto_aead_chacha20poly1305_ietf_NSECBYTES, + crypto_aead_chacha20poly1305_ietf_decrypt, + crypto_aead_chacha20poly1305_ietf_encrypt, + crypto_aead_xchacha20poly1305_ietf_ABYTES, + crypto_aead_xchacha20poly1305_ietf_KEYBYTES, + crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX, + crypto_aead_xchacha20poly1305_ietf_NPUBBYTES, + crypto_aead_xchacha20poly1305_ietf_NSECBYTES, + crypto_aead_xchacha20poly1305_ietf_decrypt, + crypto_aead_xchacha20poly1305_ietf_encrypt, +) +from nacl.bindings.crypto_box import ( + crypto_box, + crypto_box_BEFORENMBYTES, + crypto_box_BOXZEROBYTES, + crypto_box_NONCEBYTES, + crypto_box_PUBLICKEYBYTES, + crypto_box_SEALBYTES, + crypto_box_SECRETKEYBYTES, + crypto_box_SEEDBYTES, + crypto_box_ZEROBYTES, + crypto_box_afternm, + crypto_box_beforenm, + crypto_box_easy, + crypto_box_easy_afternm, + crypto_box_keypair, + crypto_box_open, + crypto_box_open_afternm, + crypto_box_open_easy, + crypto_box_open_easy_afternm, + crypto_box_seal, + crypto_box_seal_open, + crypto_box_seed_keypair, +) +from nacl.bindings.crypto_core import ( + crypto_core_ed25519_BYTES, + crypto_core_ed25519_NONREDUCEDSCALARBYTES, + crypto_core_ed25519_SCALARBYTES, + crypto_core_ed25519_add, + crypto_core_ed25519_from_uniform, + crypto_core_ed25519_is_valid_point, + crypto_core_ed25519_scalar_add, + crypto_core_ed25519_scalar_complement, + crypto_core_ed25519_scalar_invert, + crypto_core_ed25519_scalar_mul, + crypto_core_ed25519_scalar_negate, + crypto_core_ed25519_scalar_reduce, + crypto_core_ed25519_scalar_sub, + crypto_core_ed25519_sub, + has_crypto_core_ed25519, +) +from nacl.bindings.crypto_generichash import ( + crypto_generichash_BYTES, + 
crypto_generichash_BYTES_MAX, + crypto_generichash_BYTES_MIN, + crypto_generichash_KEYBYTES, + crypto_generichash_KEYBYTES_MAX, + crypto_generichash_KEYBYTES_MIN, + crypto_generichash_PERSONALBYTES, + crypto_generichash_SALTBYTES, + crypto_generichash_STATEBYTES, + generichash_blake2b_final as crypto_generichash_blake2b_final, + generichash_blake2b_init as crypto_generichash_blake2b_init, + generichash_blake2b_salt_personal as crypto_generichash_blake2b_salt_personal, + generichash_blake2b_update as crypto_generichash_blake2b_update, +) +from nacl.bindings.crypto_hash import ( + crypto_hash, + crypto_hash_BYTES, + crypto_hash_sha256, + crypto_hash_sha256_BYTES, + crypto_hash_sha512, + crypto_hash_sha512_BYTES, +) +from nacl.bindings.crypto_kx import ( + crypto_kx_PUBLIC_KEY_BYTES, + crypto_kx_SECRET_KEY_BYTES, + crypto_kx_SEED_BYTES, + crypto_kx_SESSION_KEY_BYTES, + crypto_kx_client_session_keys, + crypto_kx_keypair, + crypto_kx_seed_keypair, + crypto_kx_server_session_keys, +) +from nacl.bindings.crypto_pwhash import ( + crypto_pwhash_ALG_ARGON2I13, + crypto_pwhash_ALG_ARGON2ID13, + crypto_pwhash_ALG_DEFAULT, + crypto_pwhash_BYTES_MAX, + crypto_pwhash_BYTES_MIN, + crypto_pwhash_PASSWD_MAX, + crypto_pwhash_PASSWD_MIN, + crypto_pwhash_SALTBYTES, + crypto_pwhash_STRBYTES, + crypto_pwhash_alg, + crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE, + crypto_pwhash_argon2i_MEMLIMIT_MAX, + crypto_pwhash_argon2i_MEMLIMIT_MIN, + crypto_pwhash_argon2i_MEMLIMIT_MODERATE, + crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE, + crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE, + crypto_pwhash_argon2i_OPSLIMIT_MAX, + crypto_pwhash_argon2i_OPSLIMIT_MIN, + crypto_pwhash_argon2i_OPSLIMIT_MODERATE, + crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE, + crypto_pwhash_argon2i_STRPREFIX, + crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE, + crypto_pwhash_argon2id_MEMLIMIT_MAX, + crypto_pwhash_argon2id_MEMLIMIT_MIN, + crypto_pwhash_argon2id_MEMLIMIT_MODERATE, + crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE, + crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE, + crypto_pwhash_argon2id_OPSLIMIT_MAX, + crypto_pwhash_argon2id_OPSLIMIT_MIN, + crypto_pwhash_argon2id_OPSLIMIT_MODERATE, + crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE, + crypto_pwhash_argon2id_STRPREFIX, + crypto_pwhash_scryptsalsa208sha256_BYTES_MAX, + crypto_pwhash_scryptsalsa208sha256_BYTES_MIN, + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE, + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX, + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN, + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE, + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE, + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX, + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN, + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE, + crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX, + crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN, + crypto_pwhash_scryptsalsa208sha256_SALTBYTES, + crypto_pwhash_scryptsalsa208sha256_STRBYTES, + crypto_pwhash_scryptsalsa208sha256_STRPREFIX, + crypto_pwhash_scryptsalsa208sha256_ll, + crypto_pwhash_scryptsalsa208sha256_str, + crypto_pwhash_scryptsalsa208sha256_str_verify, + crypto_pwhash_str_alg, + crypto_pwhash_str_verify, + has_crypto_pwhash_scryptsalsa208sha256, + nacl_bindings_pick_scrypt_params, +) +from nacl.bindings.crypto_scalarmult import ( + crypto_scalarmult, + crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES, + crypto_scalarmult_base, + crypto_scalarmult_ed25519, + crypto_scalarmult_ed25519_BYTES, + crypto_scalarmult_ed25519_SCALARBYTES, + 
crypto_scalarmult_ed25519_base, + crypto_scalarmult_ed25519_base_noclamp, + crypto_scalarmult_ed25519_noclamp, + has_crypto_scalarmult_ed25519, +) +from nacl.bindings.crypto_secretbox import ( + crypto_secretbox, + crypto_secretbox_BOXZEROBYTES, + crypto_secretbox_KEYBYTES, + crypto_secretbox_MACBYTES, + crypto_secretbox_MESSAGEBYTES_MAX, + crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES, + crypto_secretbox_easy, + crypto_secretbox_open, + crypto_secretbox_open_easy, +) +from nacl.bindings.crypto_secretstream import ( + crypto_secretstream_xchacha20poly1305_ABYTES, + crypto_secretstream_xchacha20poly1305_HEADERBYTES, + crypto_secretstream_xchacha20poly1305_KEYBYTES, + crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX, + crypto_secretstream_xchacha20poly1305_STATEBYTES, + crypto_secretstream_xchacha20poly1305_TAG_FINAL, + crypto_secretstream_xchacha20poly1305_TAG_MESSAGE, + crypto_secretstream_xchacha20poly1305_TAG_PUSH, + crypto_secretstream_xchacha20poly1305_TAG_REKEY, + crypto_secretstream_xchacha20poly1305_init_pull, + crypto_secretstream_xchacha20poly1305_init_push, + crypto_secretstream_xchacha20poly1305_keygen, + crypto_secretstream_xchacha20poly1305_pull, + crypto_secretstream_xchacha20poly1305_push, + crypto_secretstream_xchacha20poly1305_rekey, + crypto_secretstream_xchacha20poly1305_state, +) +from nacl.bindings.crypto_shorthash import ( + BYTES as crypto_shorthash_siphash24_BYTES, + KEYBYTES as crypto_shorthash_siphash24_KEYBYTES, + XBYTES as crypto_shorthash_siphashx24_BYTES, + XKEYBYTES as crypto_shorthash_siphashx24_KEYBYTES, + crypto_shorthash_siphash24, + crypto_shorthash_siphashx24, + has_crypto_shorthash_siphashx24, +) +from nacl.bindings.crypto_sign import ( + crypto_sign, + crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES, + crypto_sign_ed25519_pk_to_curve25519, + crypto_sign_ed25519_sk_to_curve25519, + crypto_sign_ed25519_sk_to_pk, + crypto_sign_ed25519_sk_to_seed, + crypto_sign_ed25519ph_STATEBYTES, + crypto_sign_ed25519ph_final_create, + crypto_sign_ed25519ph_final_verify, + crypto_sign_ed25519ph_state, + crypto_sign_ed25519ph_update, + crypto_sign_keypair, + crypto_sign_open, + crypto_sign_seed_keypair, +) +from nacl.bindings.randombytes import ( + randombytes, + randombytes_buf_deterministic, +) +from nacl.bindings.sodium_core import sodium_init +from nacl.bindings.utils import ( + sodium_add, + sodium_increment, + sodium_memcmp, + sodium_pad, + sodium_unpad, +) + + +__all__ = [ + "crypto_aead_aegis128l_ABYTES", + "crypto_aead_aegis128l_KEYBYTES", + "crypto_aead_aegis128l_MESSAGEBYTES_MAX", + "crypto_aead_aegis128l_NPUBBYTES", + "crypto_aead_aegis128l_NSECBYTES", + "crypto_aead_aegis128l_decrypt", + "crypto_aead_aegis128l_encrypt", + "crypto_aead_aegis256_ABYTES", + "crypto_aead_aegis256_KEYBYTES", + "crypto_aead_aegis256_MESSAGEBYTES_MAX", + "crypto_aead_aegis256_NPUBBYTES", + "crypto_aead_aegis256_NSECBYTES", + "crypto_aead_aegis256_decrypt", + "crypto_aead_aegis256_encrypt", + "crypto_aead_aes256gcm_ABYTES", + "crypto_aead_aes256gcm_KEYBYTES", + "crypto_aead_aes256gcm_MESSAGEBYTES_MAX", + "crypto_aead_aes256gcm_NPUBBYTES", + "crypto_aead_aes256gcm_NSECBYTES", + "crypto_aead_aes256gcm_decrypt", + "crypto_aead_aes256gcm_encrypt", + "crypto_aead_chacha20poly1305_ABYTES", + "crypto_aead_chacha20poly1305_KEYBYTES", + "crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX", + "crypto_aead_chacha20poly1305_NPUBBYTES", + "crypto_aead_chacha20poly1305_NSECBYTES", + "crypto_aead_chacha20poly1305_decrypt", + 
"crypto_aead_chacha20poly1305_encrypt", + "crypto_aead_chacha20poly1305_ietf_ABYTES", + "crypto_aead_chacha20poly1305_ietf_KEYBYTES", + "crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX", + "crypto_aead_chacha20poly1305_ietf_NPUBBYTES", + "crypto_aead_chacha20poly1305_ietf_NSECBYTES", + "crypto_aead_chacha20poly1305_ietf_decrypt", + "crypto_aead_chacha20poly1305_ietf_encrypt", + "crypto_aead_xchacha20poly1305_ietf_ABYTES", + "crypto_aead_xchacha20poly1305_ietf_KEYBYTES", + "crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX", + "crypto_aead_xchacha20poly1305_ietf_NPUBBYTES", + "crypto_aead_xchacha20poly1305_ietf_NSECBYTES", + "crypto_aead_xchacha20poly1305_ietf_decrypt", + "crypto_aead_xchacha20poly1305_ietf_encrypt", + "crypto_box_SECRETKEYBYTES", + "crypto_box_PUBLICKEYBYTES", + "crypto_box_SEEDBYTES", + "crypto_box_NONCEBYTES", + "crypto_box_ZEROBYTES", + "crypto_box_BOXZEROBYTES", + "crypto_box_BEFORENMBYTES", + "crypto_box_SEALBYTES", + "crypto_box_keypair", + "crypto_box", + "crypto_box_open", + "crypto_box_beforenm", + "crypto_box_afternm", + "crypto_box_open_afternm", + "crypto_box_easy", + "crypto_box_easy_afternm", + "crypto_box_open_easy", + "crypto_box_open_easy_afternm", + "crypto_box_seal", + "crypto_box_seal_open", + "crypto_box_seed_keypair", + "has_crypto_core_ed25519", + "crypto_core_ed25519_BYTES", + "crypto_core_ed25519_UNIFORMBYTES", + "crypto_core_ed25519_SCALARBYTES", + "crypto_core_ed25519_NONREDUCEDSCALARBYTES", + "crypto_core_ed25519_add", + "crypto_core_ed25519_from_uniform", + "crypto_core_ed25519_is_valid_point", + "crypto_core_ed25519_sub", + "crypto_core_ed25519_scalar_invert", + "crypto_core_ed25519_scalar_negate", + "crypto_core_ed25519_scalar_complement", + "crypto_core_ed25519_scalar_add", + "crypto_core_ed25519_scalar_sub", + "crypto_core_ed25519_scalar_mul", + "crypto_core_ed25519_scalar_reduce", + "crypto_hash_BYTES", + "crypto_hash_sha256_BYTES", + "crypto_hash_sha512_BYTES", + "crypto_hash", + "crypto_hash_sha256", + "crypto_hash_sha512", + "crypto_generichash_BYTES", + "crypto_generichash_BYTES_MIN", + "crypto_generichash_BYTES_MAX", + "crypto_generichash_KEYBYTES", + "crypto_generichash_KEYBYTES_MIN", + "crypto_generichash_KEYBYTES_MAX", + "crypto_generichash_SALTBYTES", + "crypto_generichash_PERSONALBYTES", + "crypto_generichash_STATEBYTES", + "crypto_generichash_blake2b_salt_personal", + "crypto_generichash_blake2b_init", + "crypto_generichash_blake2b_update", + "crypto_generichash_blake2b_final", + "crypto_kx_keypair", + "crypto_kx_seed_keypair", + "crypto_kx_client_session_keys", + "crypto_kx_server_session_keys", + "crypto_kx_PUBLIC_KEY_BYTES", + "crypto_kx_SECRET_KEY_BYTES", + "crypto_kx_SEED_BYTES", + "crypto_kx_SESSION_KEY_BYTES", + "has_crypto_scalarmult_ed25519", + "crypto_scalarmult_BYTES", + "crypto_scalarmult_SCALARBYTES", + "crypto_scalarmult", + "crypto_scalarmult_base", + "crypto_scalarmult_ed25519_BYTES", + "crypto_scalarmult_ed25519_SCALARBYTES", + "crypto_scalarmult_ed25519", + "crypto_scalarmult_ed25519_base", + "crypto_scalarmult_ed25519_noclamp", + "crypto_scalarmult_ed25519_base_noclamp", + "crypto_secretbox_KEYBYTES", + "crypto_secretbox_NONCEBYTES", + "crypto_secretbox_ZEROBYTES", + "crypto_secretbox_BOXZEROBYTES", + "crypto_secretbox_MACBYTES", + "crypto_secretbox_MESSAGEBYTES_MAX", + "crypto_secretbox", + "crypto_secretbox_easy", + "crypto_secretbox_open", + "crypto_secretbox_open_easy", + "crypto_secretstream_xchacha20poly1305_ABYTES", + "crypto_secretstream_xchacha20poly1305_HEADERBYTES", + 
"crypto_secretstream_xchacha20poly1305_KEYBYTES", + "crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX", + "crypto_secretstream_xchacha20poly1305_STATEBYTES", + "crypto_secretstream_xchacha20poly1305_TAG_FINAL", + "crypto_secretstream_xchacha20poly1305_TAG_MESSAGE", + "crypto_secretstream_xchacha20poly1305_TAG_PUSH", + "crypto_secretstream_xchacha20poly1305_TAG_REKEY", + "crypto_secretstream_xchacha20poly1305_init_pull", + "crypto_secretstream_xchacha20poly1305_init_push", + "crypto_secretstream_xchacha20poly1305_keygen", + "crypto_secretstream_xchacha20poly1305_pull", + "crypto_secretstream_xchacha20poly1305_push", + "crypto_secretstream_xchacha20poly1305_rekey", + "crypto_secretstream_xchacha20poly1305_state", + "has_crypto_shorthash_siphashx24", + "crypto_shorthash_siphash24_BYTES", + "crypto_shorthash_siphash24_KEYBYTES", + "crypto_shorthash_siphash24", + "crypto_shorthash_siphashx24_BYTES", + "crypto_shorthash_siphashx24_KEYBYTES", + "crypto_shorthash_siphashx24", + "crypto_sign_BYTES", + "crypto_sign_SEEDBYTES", + "crypto_sign_PUBLICKEYBYTES", + "crypto_sign_SECRETKEYBYTES", + "crypto_sign_keypair", + "crypto_sign_seed_keypair", + "crypto_sign", + "crypto_sign_open", + "crypto_sign_ed25519_pk_to_curve25519", + "crypto_sign_ed25519_sk_to_curve25519", + "crypto_sign_ed25519_sk_to_pk", + "crypto_sign_ed25519_sk_to_seed", + "crypto_sign_ed25519ph_STATEBYTES", + "crypto_sign_ed25519ph_final_create", + "crypto_sign_ed25519ph_final_verify", + "crypto_sign_ed25519ph_state", + "crypto_sign_ed25519ph_update", + "crypto_pwhash_ALG_ARGON2I13", + "crypto_pwhash_ALG_ARGON2ID13", + "crypto_pwhash_ALG_DEFAULT", + "crypto_pwhash_BYTES_MAX", + "crypto_pwhash_BYTES_MIN", + "crypto_pwhash_PASSWD_MAX", + "crypto_pwhash_PASSWD_MIN", + "crypto_pwhash_SALTBYTES", + "crypto_pwhash_STRBYTES", + "crypto_pwhash_alg", + "crypto_pwhash_argon2i_MEMLIMIT_MIN", + "crypto_pwhash_argon2i_MEMLIMIT_MAX", + "crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE", + "crypto_pwhash_argon2i_MEMLIMIT_MODERATE", + "crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE", + "crypto_pwhash_argon2i_OPSLIMIT_MIN", + "crypto_pwhash_argon2i_OPSLIMIT_MAX", + "crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE", + "crypto_pwhash_argon2i_OPSLIMIT_MODERATE", + "crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE", + "crypto_pwhash_argon2i_STRPREFIX", + "crypto_pwhash_argon2id_MEMLIMIT_MIN", + "crypto_pwhash_argon2id_MEMLIMIT_MAX", + "crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE", + "crypto_pwhash_argon2id_MEMLIMIT_MODERATE", + "crypto_pwhash_argon2id_OPSLIMIT_MIN", + "crypto_pwhash_argon2id_OPSLIMIT_MAX", + "crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE", + "crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE", + "crypto_pwhash_argon2id_OPSLIMIT_MODERATE", + "crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE", + "crypto_pwhash_argon2id_STRPREFIX", + "crypto_pwhash_str_alg", + "crypto_pwhash_str_verify", + "has_crypto_pwhash_scryptsalsa208sha256", + "crypto_pwhash_scryptsalsa208sha256_BYTES_MAX", + "crypto_pwhash_scryptsalsa208sha256_BYTES_MIN", + "crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE", + "crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX", + "crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN", + "crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE", + "crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE", + "crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX", + "crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN", + "crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE", + "crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX", + "crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN", + 
"crypto_pwhash_scryptsalsa208sha256_SALTBYTES", + "crypto_pwhash_scryptsalsa208sha256_STRBYTES", + "crypto_pwhash_scryptsalsa208sha256_STRPREFIX", + "crypto_pwhash_scryptsalsa208sha256_ll", + "crypto_pwhash_scryptsalsa208sha256_str", + "crypto_pwhash_scryptsalsa208sha256_str_verify", + "nacl_bindings_pick_scrypt_params", + "randombytes", + "randombytes_buf_deterministic", + "sodium_init", + "sodium_add", + "sodium_increment", + "sodium_memcmp", + "sodium_pad", + "sodium_unpad", +] + + +# Initialize Sodium +sodium_init() diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..a290802 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_aead.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_aead.cpython-39.pyc new file mode 100644 index 0000000..3cc69a2 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_aead.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_box.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_box.cpython-39.pyc new file mode 100644 index 0000000..15de45a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_box.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_core.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_core.cpython-39.pyc new file mode 100644 index 0000000..529bc03 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_core.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_generichash.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_generichash.cpython-39.pyc new file mode 100644 index 0000000..96a048a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_generichash.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_hash.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_hash.cpython-39.pyc new file mode 100644 index 0000000..60e228b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_hash.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_kx.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_kx.cpython-39.pyc new file mode 100644 index 0000000..ddd7b84 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_kx.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_pwhash.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_pwhash.cpython-39.pyc new file mode 100644 index 0000000..90504ec Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_pwhash.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_scalarmult.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_scalarmult.cpython-39.pyc new file mode 100644 index 0000000..2cc8784 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_scalarmult.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_secretbox.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_secretbox.cpython-39.pyc new file mode 100644 index 0000000..1d76973 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_secretbox.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_secretstream.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_secretstream.cpython-39.pyc new file mode 100644 index 0000000..18bb95d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_secretstream.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_shorthash.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_shorthash.cpython-39.pyc new file mode 100644 index 0000000..0aed63c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_shorthash.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_sign.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_sign.cpython-39.pyc new file mode 100644 index 0000000..d633c08 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/crypto_sign.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/randombytes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/randombytes.cpython-39.pyc new file mode 100644 index 0000000..3f5aa90 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/randombytes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/sodium_core.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/sodium_core.cpython-39.pyc new file mode 100644 index 0000000..ff57bda Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/sodium_core.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/utils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..37ebc82 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/bindings/__pycache__/utils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_aead.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_aead.py new file mode 100644 index 0000000..2f7da78 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_aead.py @@ -0,0 +1,1069 @@ +# Copyright 2017 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + +""" +Implementations of authenticated encription with associated data (*AEAD*) +constructions building on the chacha20 stream cipher and the poly1305 +authenticator +""" + +crypto_aead_chacha20poly1305_ietf_KEYBYTES: int = ( + lib.crypto_aead_chacha20poly1305_ietf_keybytes() +) +crypto_aead_chacha20poly1305_ietf_NSECBYTES: int = ( + lib.crypto_aead_chacha20poly1305_ietf_nsecbytes() +) +crypto_aead_chacha20poly1305_ietf_NPUBBYTES: int = ( + lib.crypto_aead_chacha20poly1305_ietf_npubbytes() +) +crypto_aead_chacha20poly1305_ietf_ABYTES: int = ( + lib.crypto_aead_chacha20poly1305_ietf_abytes() +) +crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_chacha20poly1305_ietf_messagebytes_max() +) +_aead_chacha20poly1305_ietf_CRYPTBYTES_MAX = ( + crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX + + crypto_aead_chacha20poly1305_ietf_ABYTES +) + +crypto_aead_chacha20poly1305_KEYBYTES: int = ( + lib.crypto_aead_chacha20poly1305_keybytes() +) +crypto_aead_chacha20poly1305_NSECBYTES: int = ( + lib.crypto_aead_chacha20poly1305_nsecbytes() +) +crypto_aead_chacha20poly1305_NPUBBYTES: int = ( + lib.crypto_aead_chacha20poly1305_npubbytes() +) +crypto_aead_chacha20poly1305_ABYTES: int = ( + lib.crypto_aead_chacha20poly1305_abytes() +) +crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_chacha20poly1305_messagebytes_max() +) +_aead_chacha20poly1305_CRYPTBYTES_MAX = ( + crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX + + crypto_aead_chacha20poly1305_ABYTES +) + +crypto_aead_xchacha20poly1305_ietf_KEYBYTES: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_keybytes() +) +crypto_aead_xchacha20poly1305_ietf_NSECBYTES: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_nsecbytes() +) +crypto_aead_xchacha20poly1305_ietf_NPUBBYTES: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_npubbytes() +) +crypto_aead_xchacha20poly1305_ietf_ABYTES: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_abytes() +) +crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_messagebytes_max() +) +_aead_xchacha20poly1305_ietf_CRYPTBYTES_MAX = ( + crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX + + crypto_aead_xchacha20poly1305_ietf_ABYTES +) + +crypto_aead_aegis256_KEYBYTES: int = lib.crypto_aead_aegis256_keybytes() +crypto_aead_aegis256_NSECBYTES: int = lib.crypto_aead_aegis256_nsecbytes() +crypto_aead_aegis256_NPUBBYTES: int = lib.crypto_aead_aegis256_npubbytes() +crypto_aead_aegis256_ABYTES: int = lib.crypto_aead_aegis256_abytes() +crypto_aead_aegis256_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_aegis256_messagebytes_max() +) +_aead_aegis256_CRYPTBYTES_MAX = ( + crypto_aead_aegis256_MESSAGEBYTES_MAX + crypto_aead_aegis256_ABYTES +) + +crypto_aead_aegis128l_KEYBYTES: int = lib.crypto_aead_aegis128l_keybytes() +crypto_aead_aegis128l_NSECBYTES: int = lib.crypto_aead_aegis128l_nsecbytes() +crypto_aead_aegis128l_NPUBBYTES: int = lib.crypto_aead_aegis128l_npubbytes() +crypto_aead_aegis128l_ABYTES: int = 
+
+crypto_aead_chacha20poly1305_ietf_KEYBYTES: int = (
+    lib.crypto_aead_chacha20poly1305_ietf_keybytes()
+)
+crypto_aead_chacha20poly1305_ietf_NSECBYTES: int = (
+    lib.crypto_aead_chacha20poly1305_ietf_nsecbytes()
+)
+crypto_aead_chacha20poly1305_ietf_NPUBBYTES: int = (
+    lib.crypto_aead_chacha20poly1305_ietf_npubbytes()
+)
+crypto_aead_chacha20poly1305_ietf_ABYTES: int = (
+    lib.crypto_aead_chacha20poly1305_ietf_abytes()
+)
+crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX: int = (
+    lib.crypto_aead_chacha20poly1305_ietf_messagebytes_max()
+)
+_aead_chacha20poly1305_ietf_CRYPTBYTES_MAX = (
+    crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX
+    + crypto_aead_chacha20poly1305_ietf_ABYTES
+)
+
+crypto_aead_chacha20poly1305_KEYBYTES: int = (
+    lib.crypto_aead_chacha20poly1305_keybytes()
+)
+crypto_aead_chacha20poly1305_NSECBYTES: int = (
+    lib.crypto_aead_chacha20poly1305_nsecbytes()
+)
+crypto_aead_chacha20poly1305_NPUBBYTES: int = (
+    lib.crypto_aead_chacha20poly1305_npubbytes()
+)
+crypto_aead_chacha20poly1305_ABYTES: int = (
+    lib.crypto_aead_chacha20poly1305_abytes()
+)
+crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX: int = (
+    lib.crypto_aead_chacha20poly1305_messagebytes_max()
+)
+_aead_chacha20poly1305_CRYPTBYTES_MAX = (
+    crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX
+    + crypto_aead_chacha20poly1305_ABYTES
+)
+
+crypto_aead_xchacha20poly1305_ietf_KEYBYTES: int = (
+    lib.crypto_aead_xchacha20poly1305_ietf_keybytes()
+)
+crypto_aead_xchacha20poly1305_ietf_NSECBYTES: int = (
+    lib.crypto_aead_xchacha20poly1305_ietf_nsecbytes()
+)
+crypto_aead_xchacha20poly1305_ietf_NPUBBYTES: int = (
+    lib.crypto_aead_xchacha20poly1305_ietf_npubbytes()
+)
+crypto_aead_xchacha20poly1305_ietf_ABYTES: int = (
+    lib.crypto_aead_xchacha20poly1305_ietf_abytes()
+)
+crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX: int = (
+    lib.crypto_aead_xchacha20poly1305_ietf_messagebytes_max()
+)
+_aead_xchacha20poly1305_ietf_CRYPTBYTES_MAX = (
+    crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX
+    + crypto_aead_xchacha20poly1305_ietf_ABYTES
+)
+
+crypto_aead_aegis256_KEYBYTES: int = lib.crypto_aead_aegis256_keybytes()
+crypto_aead_aegis256_NSECBYTES: int = lib.crypto_aead_aegis256_nsecbytes()
+crypto_aead_aegis256_NPUBBYTES: int = lib.crypto_aead_aegis256_npubbytes()
+crypto_aead_aegis256_ABYTES: int = lib.crypto_aead_aegis256_abytes()
+crypto_aead_aegis256_MESSAGEBYTES_MAX: int = (
+    lib.crypto_aead_aegis256_messagebytes_max()
+)
+_aead_aegis256_CRYPTBYTES_MAX = (
+    crypto_aead_aegis256_MESSAGEBYTES_MAX + crypto_aead_aegis256_ABYTES
+)
+
+crypto_aead_aegis128l_KEYBYTES: int = lib.crypto_aead_aegis128l_keybytes()
+crypto_aead_aegis128l_NSECBYTES: int = lib.crypto_aead_aegis128l_nsecbytes()
+crypto_aead_aegis128l_NPUBBYTES: int = lib.crypto_aead_aegis128l_npubbytes()
+crypto_aead_aegis128l_ABYTES: int = lib.crypto_aead_aegis128l_abytes()
+crypto_aead_aegis128l_MESSAGEBYTES_MAX: int = (
+    lib.crypto_aead_aegis128l_messagebytes_max()
+)
+_aead_aegis128l_CRYPTBYTES_MAX = (
+    crypto_aead_aegis128l_MESSAGEBYTES_MAX + crypto_aead_aegis128l_ABYTES
+)
+
+crypto_aead_aes256gcm_KEYBYTES: int = lib.crypto_aead_aes256gcm_keybytes()
+crypto_aead_aes256gcm_NSECBYTES: int = lib.crypto_aead_aes256gcm_nsecbytes()
+crypto_aead_aes256gcm_NPUBBYTES: int = lib.crypto_aead_aes256gcm_npubbytes()
+crypto_aead_aes256gcm_ABYTES: int = lib.crypto_aead_aes256gcm_abytes()
+crypto_aead_aes256gcm_MESSAGEBYTES_MAX: int = (
+    lib.crypto_aead_aes256gcm_messagebytes_max()
+)
+_aead_aes256gcm_CRYPTBYTES_MAX = (
+    crypto_aead_aes256gcm_MESSAGEBYTES_MAX + crypto_aead_aes256gcm_ABYTES
+)
+
+
+def crypto_aead_chacha20poly1305_ietf_encrypt(
+    message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes
+) -> bytes:
+    """
+    Encrypt the given ``message`` using the IETF ratified chacha20poly1305
+    construction described in RFC7539.
+
+    :param message:
+    :type message: bytes
+    :param aad:
+    :type aad: Optional[bytes]
+    :param nonce:
+    :type nonce: bytes
+    :param key:
+    :type key: bytes
+    :return: authenticated ciphertext
+    :rtype: bytes
+    """
+    ensure(
+        isinstance(message, bytes),
+        "Input message type must be bytes",
+        raising=exc.TypeError,
+    )
+
+    mlen = len(message)
+
+    ensure(
+        mlen <= crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX,
+        "Message must be at most {} bytes long".format(
+            crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX
+        ),
+        raising=exc.ValueError,
+    )
+
+    ensure(
+        isinstance(aad, bytes) or (aad is None),
+        "Additional data must be bytes or None",
+        raising=exc.TypeError,
+    )
+
+    ensure(
+        isinstance(nonce, bytes)
+        and len(nonce) == crypto_aead_chacha20poly1305_ietf_NPUBBYTES,
+        "Nonce must be a {} bytes long bytes sequence".format(
+            crypto_aead_chacha20poly1305_ietf_NPUBBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    ensure(
+        isinstance(key, bytes)
+        and len(key) == crypto_aead_chacha20poly1305_ietf_KEYBYTES,
+        "Key must be a {} bytes long bytes sequence".format(
+            crypto_aead_chacha20poly1305_ietf_KEYBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    if aad:
+        _aad = aad
+        aalen = len(aad)
+    else:
+        _aad = ffi.NULL
+        aalen = 0
+
+    mxout = mlen + crypto_aead_chacha20poly1305_ietf_ABYTES
+
+    clen = ffi.new("unsigned long long *")
+
+    ciphertext = ffi.new("unsigned char[]", mxout)
+
+    res = lib.crypto_aead_chacha20poly1305_ietf_encrypt(
+        ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key
+    )
+
+    ensure(res == 0, "Encryption failed.", raising=exc.CryptoError)
+    return ffi.buffer(ciphertext, clen[0])[:]
+
+
+def crypto_aead_chacha20poly1305_ietf_decrypt(
+    ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes
+) -> bytes:
+    """
+    Decrypt the given ``ciphertext`` using the IETF ratified chacha20poly1305
+    construction described in RFC7539.
+ + :param ciphertext: + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_chacha20poly1305_ietf_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_chacha20poly1305_ietf_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_chacha20poly1305_ietf_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_ietf_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_chacha20poly1305_ietf_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_ietf_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_chacha20poly1305_ietf_ABYTES + + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_chacha20poly1305_ietf_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_chacha20poly1305_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the "legacy" construction + described in draft-agl-tls-chacha20poly1305. 
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_chacha20poly1305_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_chacha20poly1305_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_chacha20poly1305_ietf_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_chacha20poly1305_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_chacha20poly1305_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the "legacy" construction + described in draft-agl-tls-chacha20poly1305. 
+ + :param ciphertext: authenticated ciphertext + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_chacha20poly1305_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_chacha20poly1305_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_chacha20poly1305_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_chacha20poly1305_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_chacha20poly1305_ABYTES + + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_chacha20poly1305_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_xchacha20poly1305_ietf_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the long-nonces xchacha20poly1305 + construction. 
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_xchacha20poly1305_ietf_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_xchacha20poly1305_ietf_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_xchacha20poly1305_ietf_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_xchacha20poly1305_ietf_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_xchacha20poly1305_ietf_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_xchacha20poly1305_ietf_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_xchacha20poly1305_ietf_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the long-nonces xchacha20poly1305 + construction. 
+ + :param ciphertext: authenticated ciphertext + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_xchacha20poly1305_ietf_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_xchacha20poly1305_ietf_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_xchacha20poly1305_ietf_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_xchacha20poly1305_ietf_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_xchacha20poly1305_ietf_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_xchacha20poly1305_ietf_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_xchacha20poly1305_ietf_ABYTES + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_xchacha20poly1305_ietf_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_aegis256_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the AEGIS-256 + construction. 
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_aegis256_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_aegis256_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aegis256_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aegis256_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aegis256_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aegis256_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_aegis256_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_aegis256_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_aegis256_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the AEGIS-256 + construction. + + :param ciphertext: authenticated ciphertext + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_aegis256_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_aegis256_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aegis256_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aegis256_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aegis256_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aegis256_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_aegis256_ABYTES + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_aegis256_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_aegis128l_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the AEGIS-128L + construction. 
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_aegis128l_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_aegis128l_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aegis128l_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aegis128l_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aegis128l_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aegis128l_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_aegis128l_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_aegis128l_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_aegis128l_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the AEGIS-128L + construction. + + :param ciphertext: authenticated ciphertext + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_aegis256_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_aegis256_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aegis128l_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aegis128l_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aegis128l_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aegis128l_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_aegis128l_ABYTES + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_aegis128l_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_aes256gcm_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the AES-256-GCM + construction. Requires the Intel AES-NI extensions, + or the ARM Crypto extensions. 
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + lib.crypto_aead_aes256gcm_is_available() == 1, + "Construction requires hardware acceleration", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_aes256gcm_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_aes256gcm_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aes256gcm_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aes256gcm_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aes256gcm_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aes256gcm_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_aes256gcm_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_aes256gcm_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_aes256gcm_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the AES-256-GCM + construction. Requires the Intel AES-NI extensions, + or the ARM Crypto extensions. 
+ + :param ciphertext: authenticated ciphertext + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + lib.crypto_aead_aes256gcm_is_available() == 1, + "Construction requires hardware acceleration", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_aegis256_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_aegis256_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aes256gcm_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aes256gcm_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aes256gcm_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aes256gcm_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_aes256gcm_ABYTES + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_aes256gcm_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_box.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_box.py new file mode 100644 index 0000000..da6e4cb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_box.py @@ -0,0 +1,475 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Tuple + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +__all__ = ["crypto_box_keypair", "crypto_box"] + + +crypto_box_SECRETKEYBYTES: int = lib.crypto_box_secretkeybytes() +crypto_box_PUBLICKEYBYTES: int = lib.crypto_box_publickeybytes() +crypto_box_SEEDBYTES: int = lib.crypto_box_seedbytes() +crypto_box_NONCEBYTES: int = lib.crypto_box_noncebytes() +crypto_box_ZEROBYTES: int = lib.crypto_box_zerobytes() +crypto_box_BOXZEROBYTES: int = lib.crypto_box_boxzerobytes() +crypto_box_BEFORENMBYTES: int = lib.crypto_box_beforenmbytes() +crypto_box_SEALBYTES: int = lib.crypto_box_sealbytes() +crypto_box_MACBYTES: int = lib.crypto_box_macbytes() + + +def crypto_box_keypair() -> Tuple[bytes, bytes]: + """ + Returns a randomly generated public and secret key. 
+ + :rtype: (bytes(public_key), bytes(secret_key)) + """ + pk = ffi.new("unsigned char[]", crypto_box_PUBLICKEYBYTES) + sk = ffi.new("unsigned char[]", crypto_box_SECRETKEYBYTES) + + rc = lib.crypto_box_keypair(pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ( + ffi.buffer(pk, crypto_box_PUBLICKEYBYTES)[:], + ffi.buffer(sk, crypto_box_SECRETKEYBYTES)[:], + ) + + +def crypto_box_seed_keypair(seed: bytes) -> Tuple[bytes, bytes]: + """ + Returns a (public, secret) key pair deterministically generated + from an input ``seed``. + + .. warning:: The seed **must** be high-entropy; therefore, + its generator **must** be a cryptographic quality + random function like, for example, :func:`~nacl.utils.random`. + + .. warning:: The seed **must** be protected and remain secret. + Anyone who knows the seed is really in possession of + the corresponding PrivateKey. + + + :param seed: bytes + :rtype: (bytes(public_key), bytes(secret_key)) + """ + ensure(isinstance(seed, bytes), "seed must be bytes", raising=TypeError) + + if len(seed) != crypto_box_SEEDBYTES: + raise exc.ValueError("Invalid seed") + + pk = ffi.new("unsigned char[]", crypto_box_PUBLICKEYBYTES) + sk = ffi.new("unsigned char[]", crypto_box_SECRETKEYBYTES) + + rc = lib.crypto_box_seed_keypair(pk, sk, seed) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ( + ffi.buffer(pk, crypto_box_PUBLICKEYBYTES)[:], + ffi.buffer(sk, crypto_box_SECRETKEYBYTES)[:], + ) + + +def crypto_box(message: bytes, nonce: bytes, pk: bytes, sk: bytes) -> bytes: + """ + Encrypts and returns a message ``message`` using the secret key ``sk``, + public key ``pk``, and the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce size") + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + padded = (b"\x00" * crypto_box_ZEROBYTES) + message + ciphertext = ffi.new("unsigned char[]", len(padded)) + + rc = lib.crypto_box(ciphertext, padded, len(padded), nonce, pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, len(padded))[crypto_box_BOXZEROBYTES:] + + +def crypto_box_open( + ciphertext: bytes, nonce: bytes, pk: bytes, sk: bytes +) -> bytes: + """ + Decrypts and returns an encrypted message ``ciphertext``, using the secret + key ``sk``, public key ``pk``, and the nonce ``nonce``. + + :param ciphertext: bytes + :param nonce: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce size") + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + padded = (b"\x00" * crypto_box_BOXZEROBYTES) + ciphertext + plaintext = ffi.new("unsigned char[]", len(padded)) + + res = lib.crypto_box_open(plaintext, padded, len(padded), nonce, pk, sk) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, len(padded))[crypto_box_ZEROBYTES:] + + +def crypto_box_beforenm(pk: bytes, sk: bytes) -> bytes: + """ + Computes and returns the shared key for the public key ``pk`` and the + secret key ``sk``. 
This can be used to speed up operations where the same + set of keys is going to be used multiple times. + + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + k = ffi.new("unsigned char[]", crypto_box_BEFORENMBYTES) + + rc = lib.crypto_box_beforenm(k, pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(k, crypto_box_BEFORENMBYTES)[:] + + +def crypto_box_afternm(message: bytes, nonce: bytes, k: bytes) -> bytes: + """ + Encrypts and returns the message ``message`` using the shared key ``k`` and + the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param k: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + if len(k) != crypto_box_BEFORENMBYTES: + raise exc.ValueError("Invalid shared key") + + padded = b"\x00" * crypto_box_ZEROBYTES + message + ciphertext = ffi.new("unsigned char[]", len(padded)) + + rc = lib.crypto_box_afternm(ciphertext, padded, len(padded), nonce, k) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, len(padded))[crypto_box_BOXZEROBYTES:] + + +def crypto_box_open_afternm( + ciphertext: bytes, nonce: bytes, k: bytes +) -> bytes: + """ + Decrypts and returns the encrypted message ``ciphertext``, using the shared + key ``k`` and the nonce ``nonce``. + + :param ciphertext: bytes + :param nonce: bytes + :param k: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + if len(k) != crypto_box_BEFORENMBYTES: + raise exc.ValueError("Invalid shared key") + + padded = (b"\x00" * crypto_box_BOXZEROBYTES) + ciphertext + plaintext = ffi.new("unsigned char[]", len(padded)) + + res = lib.crypto_box_open_afternm(plaintext, padded, len(padded), nonce, k) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, len(padded))[crypto_box_ZEROBYTES:] + + +def crypto_box_easy( + message: bytes, nonce: bytes, pk: bytes, sk: bytes +) -> bytes: + """ + Encrypts and returns a message ``message`` using the secret key ``sk``, + public key ``pk``, and the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce size") + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + _mlen = len(message) + _clen = crypto_box_MACBYTES + _mlen + + ciphertext = ffi.new("unsigned char[]", _clen) + + rc = lib.crypto_box_easy(ciphertext, message, _mlen, nonce, pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, _clen)[:] + + +def crypto_box_open_easy( + ciphertext: bytes, nonce: bytes, pk: bytes, sk: bytes +) -> bytes: + """ + Decrypts and returns an encrypted message ``ciphertext``, using the secret + key ``sk``, public key ``pk``, and the nonce ``nonce``. 
+ + :param ciphertext: bytes + :param nonce: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce size") + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + _clen = len(ciphertext) + + ensure( + _clen >= crypto_box_MACBYTES, + "Input ciphertext must be at least {} long".format( + crypto_box_MACBYTES + ), + raising=exc.TypeError, + ) + + _mlen = _clen - crypto_box_MACBYTES + + plaintext = ffi.new("unsigned char[]", max(1, _mlen)) + + res = lib.crypto_box_open_easy(plaintext, ciphertext, _clen, nonce, pk, sk) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, _mlen)[:] + + +def crypto_box_easy_afternm(message: bytes, nonce: bytes, k: bytes) -> bytes: + """ + Encrypts and returns the message ``message`` using the shared key ``k`` and + the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param k: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + if len(k) != crypto_box_BEFORENMBYTES: + raise exc.ValueError("Invalid shared key") + + _mlen = len(message) + _clen = crypto_box_MACBYTES + _mlen + + ciphertext = ffi.new("unsigned char[]", _clen) + + rc = lib.crypto_box_easy_afternm(ciphertext, message, _mlen, nonce, k) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, _clen)[:] + + +def crypto_box_open_easy_afternm( + ciphertext: bytes, nonce: bytes, k: bytes +) -> bytes: + """ + Decrypts and returns the encrypted message ``ciphertext``, using the shared + key ``k`` and the nonce ``nonce``. + + :param ciphertext: bytes + :param nonce: bytes + :param k: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + if len(k) != crypto_box_BEFORENMBYTES: + raise exc.ValueError("Invalid shared key") + + _clen = len(ciphertext) + + ensure( + _clen >= crypto_box_MACBYTES, + "Input ciphertext must be at least {} long".format( + crypto_box_MACBYTES + ), + raising=exc.TypeError, + ) + + _mlen = _clen - crypto_box_MACBYTES + + plaintext = ffi.new("unsigned char[]", max(1, _mlen)) + + res = lib.crypto_box_open_easy_afternm( + plaintext, ciphertext, _clen, nonce, k + ) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, _mlen)[:] + + +def crypto_box_seal(message: bytes, pk: bytes) -> bytes: + """ + Encrypts and returns a message ``message`` using an ephemeral secret key + and the public key ``pk``. + The ephemeral public key, which is embedded in the sealed box, is also + used, in combination with ``pk``, to derive the nonce needed for the + underlying box construct. + + :param message: bytes + :param pk: bytes + :rtype: bytes + + .. 
versionadded:: 1.2 + """ + ensure( + isinstance(message, bytes), + "input message must be bytes", + raising=TypeError, + ) + + ensure( + isinstance(pk, bytes), "public key must be bytes", raising=TypeError + ) + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + _mlen = len(message) + _clen = crypto_box_SEALBYTES + _mlen + + ciphertext = ffi.new("unsigned char[]", _clen) + + rc = lib.crypto_box_seal(ciphertext, message, _mlen, pk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, _clen)[:] + + +def crypto_box_seal_open(ciphertext: bytes, pk: bytes, sk: bytes) -> bytes: + """ + Decrypts and returns an encrypted message ``ciphertext``, using the + recipent's secret key ``sk`` and the sender's ephemeral public key + embedded in the sealed box. The box construct nonce is derived from + the recipient's public key ``pk`` and the sender's public key. + + :param ciphertext: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + + .. versionadded:: 1.2 + """ + ensure( + isinstance(ciphertext, bytes), + "input ciphertext must be bytes", + raising=TypeError, + ) + + ensure( + isinstance(pk, bytes), "public key must be bytes", raising=TypeError + ) + + ensure( + isinstance(sk, bytes), "secret key must be bytes", raising=TypeError + ) + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + _clen = len(ciphertext) + + ensure( + _clen >= crypto_box_SEALBYTES, + ("Input ciphertext must be at least {} long").format( + crypto_box_SEALBYTES + ), + raising=exc.TypeError, + ) + + _mlen = _clen - crypto_box_SEALBYTES + + # zero-length malloc results are implementation.dependent + plaintext = ffi.new("unsigned char[]", max(1, _mlen)) + + res = lib.crypto_box_seal_open(plaintext, ciphertext, _clen, pk, sk) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, _mlen)[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_core.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_core.py new file mode 100644 index 0000000..e64a064 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_core.py @@ -0,0 +1,449 @@ +# Copyright 2018 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
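# A standalone sketch of how the edwards25519 helpers defined below in this
# module combine, assuming a libsodium build where has_crypto_core_ed25519 is
# True (a minimal build raises nacl.exceptions.UnavailableError instead):
# arbitrary bytes are mapped onto the main subgroup, added as points, and a
# wide scalar is reduced modulo the group order L.
import os

from nacl.bindings.crypto_core import (
    crypto_core_ed25519_BYTES,
    crypto_core_ed25519_NONREDUCEDSCALARBYTES,
    crypto_core_ed25519_add,
    crypto_core_ed25519_from_uniform,
    crypto_core_ed25519_is_valid_point,
    crypto_core_ed25519_scalar_reduce,
)

p = crypto_core_ed25519_from_uniform(os.urandom(crypto_core_ed25519_BYTES))  # Elligator 2 map onto the curve
q = crypto_core_ed25519_from_uniform(os.urandom(crypto_core_ed25519_BYTES))
r = crypto_core_ed25519_add(p, q)  # point addition on edwards25519
assert crypto_core_ed25519_is_valid_point(r)

wide = os.urandom(crypto_core_ed25519_NONREDUCEDSCALARBYTES)
s = crypto_core_ed25519_scalar_reduce(wide)  # 64-byte value reduced mod L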
+ + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +has_crypto_core_ed25519 = bool(lib.PYNACL_HAS_CRYPTO_CORE_ED25519) + +crypto_core_ed25519_BYTES = 0 +crypto_core_ed25519_SCALARBYTES = 0 +crypto_core_ed25519_NONREDUCEDSCALARBYTES = 0 + +if has_crypto_core_ed25519: + crypto_core_ed25519_BYTES = lib.crypto_core_ed25519_bytes() + crypto_core_ed25519_SCALARBYTES = lib.crypto_core_ed25519_scalarbytes() + crypto_core_ed25519_NONREDUCEDSCALARBYTES = ( + lib.crypto_core_ed25519_nonreducedscalarbytes() + ) + + +def crypto_core_ed25519_is_valid_point(p: bytes) -> bool: + """ + Check if ``p`` represents a point on the edwards25519 curve, in canonical + form, on the main subgroup, and that the point doesn't have a small order. + + :param p: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :return: point validity + :rtype: bool + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) and len(p) == crypto_core_ed25519_BYTES, + "Point must be a crypto_core_ed25519_BYTES long bytes sequence", + raising=exc.TypeError, + ) + + rc = lib.crypto_core_ed25519_is_valid_point(p) + return rc == 1 + + +def crypto_core_ed25519_from_uniform(r: bytes) -> bytes: + """ + Maps a 32 bytes vector ``r`` to a point. The point is guaranteed to be on the main subgroup. + This function directly exposes the Elligator 2 map, uses the high bit to set + the sign of the X coordinate, and the resulting point is multiplied by the cofactor. + + :param r: a :py:data:`.crypto_core_ed25519_BYTES` long bytes + sequence representing arbitrary data + :type r: bytes + :return: a point on the edwards25519 curve main order subgroup, represented as a + :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(r, bytes) and len(r) == crypto_core_ed25519_BYTES, + "Integer r must be a {} long bytes sequence".format( + "crypto_core_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + p = ffi.new("unsigned char[]", crypto_core_ed25519_BYTES) + + rc = lib.crypto_core_ed25519_from_uniform(p, r) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(p, crypto_core_ed25519_BYTES)[:] + + +def crypto_core_ed25519_add(p: bytes, q: bytes) -> bytes: + """ + Add two points on the edwards25519 curve. + + :param p: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type q: bytes + :return: a point on the edwards25519 curve represented as + a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. 
+ """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_BYTES + and len(q) == crypto_core_ed25519_BYTES, + "Each point must be a {} long bytes sequence".format( + "crypto_core_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_BYTES) + + rc = lib.crypto_core_ed25519_add(r, p, q) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(r, crypto_core_ed25519_BYTES)[:] + + +def crypto_core_ed25519_sub(p: bytes, q: bytes) -> bytes: + """ + Subtract a point from another on the edwards25519 curve. + + :param p: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type q: bytes + :return: a point on the edwards25519 curve represented as + a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_BYTES + and len(q) == crypto_core_ed25519_BYTES, + "Each point must be a {} long bytes sequence".format( + "crypto_core_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_BYTES) + + rc = lib.crypto_core_ed25519_sub(r, p, q) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(r, crypto_core_ed25519_BYTES)[:] + + +def crypto_core_ed25519_scalar_invert(s: bytes) -> bytes: + """ + Return the multiplicative inverse of integer ``s`` modulo ``L``, + i.e an integer ``i`` such that ``s * i = 1 (mod L)``, where ``L`` + is the order of the main subgroup. + + Raises a ``exc.RuntimeError`` if ``s`` is the integer zero. + + :param s: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type s: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(s, bytes) and len(s) == crypto_core_ed25519_SCALARBYTES, + "Integer s must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + rc = lib.crypto_core_ed25519_scalar_invert(r, s) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_negate(s: bytes) -> bytes: + """ + Return the integer ``n`` such that ``s + n = 0 (mod L)``, where ``L`` + is the order of the main subgroup. 
+ + :param s: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type s: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(s, bytes) and len(s) == crypto_core_ed25519_SCALARBYTES, + "Integer s must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_negate(r, s) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_complement(s: bytes) -> bytes: + """ + Return the complement of integer ``s`` modulo ``L``, i.e. an integer + ``c`` such that ``s + c = 1 (mod L)``, where ``L`` is the order of + the main subgroup. + + :param s: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type s: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(s, bytes) and len(s) == crypto_core_ed25519_SCALARBYTES, + "Integer s must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_complement(r, s) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_add(p: bytes, q: bytes) -> bytes: + """ + Add integers ``p`` and ``q`` modulo ``L``, where ``L`` is the order of + the main subgroup. + + :param p: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type q: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_SCALARBYTES + and len(q) == crypto_core_ed25519_SCALARBYTES, + "Each integer must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_add(r, p, q) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_sub(p: bytes, q: bytes) -> bytes: + """ + Subtract integers ``p`` and ``q`` modulo ``L``, where ``L`` is the + order of the main subgroup. 
+ + :param p: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type q: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_SCALARBYTES + and len(q) == crypto_core_ed25519_SCALARBYTES, + "Each integer must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_sub(r, p, q) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_mul(p: bytes, q: bytes) -> bytes: + """ + Multiply integers ``p`` and ``q`` modulo ``L``, where ``L`` is the + order of the main subgroup. + + :param p: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type q: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_SCALARBYTES + and len(q) == crypto_core_ed25519_SCALARBYTES, + "Each integer must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_mul(r, p, q) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_reduce(s: bytes) -> bytes: + """ + Reduce integer ``s`` to ``s`` modulo ``L``, where ``L`` is the order + of the main subgroup. + + :param s: a :py:data:`.crypto_core_ed25519_NONREDUCEDSCALARBYTES` + long bytes sequence representing an integer + :type s: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. 
+ """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(s, bytes) + and len(s) == crypto_core_ed25519_NONREDUCEDSCALARBYTES, + "Integer s must be a {} long bytes sequence".format( + "crypto_core_ed25519_NONREDUCEDSCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_reduce(r, s) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_generichash.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_generichash.py new file mode 100644 index 0000000..6ab385a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_generichash.py @@ -0,0 +1,281 @@ +# Copyright 2013-2019 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import NoReturn, TypeVar + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +crypto_generichash_BYTES: int = lib.crypto_generichash_blake2b_bytes() +crypto_generichash_BYTES_MIN: int = lib.crypto_generichash_blake2b_bytes_min() +crypto_generichash_BYTES_MAX: int = lib.crypto_generichash_blake2b_bytes_max() +crypto_generichash_KEYBYTES: int = lib.crypto_generichash_blake2b_keybytes() +crypto_generichash_KEYBYTES_MIN: int = ( + lib.crypto_generichash_blake2b_keybytes_min() +) +crypto_generichash_KEYBYTES_MAX: int = ( + lib.crypto_generichash_blake2b_keybytes_max() +) +crypto_generichash_SALTBYTES: int = lib.crypto_generichash_blake2b_saltbytes() +crypto_generichash_PERSONALBYTES: int = ( + lib.crypto_generichash_blake2b_personalbytes() +) +crypto_generichash_STATEBYTES: int = lib.crypto_generichash_statebytes() + +_OVERLONG = "{0} length greater than {1} bytes" +_TOOBIG = "{0} greater than {1}" + + +def _checkparams( + digest_size: int, key: bytes, salt: bytes, person: bytes +) -> None: + """Check hash parameters""" + ensure( + isinstance(key, bytes), + "Key must be a bytes sequence", + raising=exc.TypeError, + ) + + ensure( + isinstance(salt, bytes), + "Salt must be a bytes sequence", + raising=exc.TypeError, + ) + + ensure( + isinstance(person, bytes), + "Person must be a bytes sequence", + raising=exc.TypeError, + ) + + ensure( + isinstance(digest_size, int), + "Digest size must be an integer number", + raising=exc.TypeError, + ) + + ensure( + digest_size <= crypto_generichash_BYTES_MAX, + _TOOBIG.format("Digest_size", crypto_generichash_BYTES_MAX), + raising=exc.ValueError, + ) + + ensure( + len(key) <= crypto_generichash_KEYBYTES_MAX, + _OVERLONG.format("Key", crypto_generichash_KEYBYTES_MAX), + raising=exc.ValueError, + ) + + ensure( + len(salt) <= crypto_generichash_SALTBYTES, + _OVERLONG.format("Salt", crypto_generichash_SALTBYTES), + raising=exc.ValueError, + ) + + ensure( + len(person) <= crypto_generichash_PERSONALBYTES, + _OVERLONG.format("Person", crypto_generichash_PERSONALBYTES), + 
raising=exc.ValueError, + ) + + +def generichash_blake2b_salt_personal( + data: bytes, + digest_size: int = crypto_generichash_BYTES, + key: bytes = b"", + salt: bytes = b"", + person: bytes = b"", +) -> bytes: + """One shot hash interface + + :param data: the input data to the hash function + :type data: bytes + :param digest_size: must be at most + :py:data:`.crypto_generichash_BYTES_MAX`; + the default digest size is + :py:data:`.crypto_generichash_BYTES` + :type digest_size: int + :param key: must be at most + :py:data:`.crypto_generichash_KEYBYTES_MAX` long + :type key: bytes + :param salt: must be at most + :py:data:`.crypto_generichash_SALTBYTES` long; + will be zero-padded if needed + :type salt: bytes + :param person: must be at most + :py:data:`.crypto_generichash_PERSONALBYTES` long: + will be zero-padded if needed + :type person: bytes + :return: digest_size long digest + :rtype: bytes + """ + + _checkparams(digest_size, key, salt, person) + + ensure( + isinstance(data, bytes), + "Input data must be a bytes sequence", + raising=exc.TypeError, + ) + + digest = ffi.new("unsigned char[]", digest_size) + + # both _salt and _personal must be zero-padded to the correct length + _salt = ffi.new("unsigned char []", crypto_generichash_SALTBYTES) + _person = ffi.new("unsigned char []", crypto_generichash_PERSONALBYTES) + + ffi.memmove(_salt, salt, len(salt)) + ffi.memmove(_person, person, len(person)) + + rc = lib.crypto_generichash_blake2b_salt_personal( + digest, digest_size, data, len(data), key, len(key), _salt, _person + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + return ffi.buffer(digest, digest_size)[:] + + +_Blake2State = TypeVar("_Blake2State", bound="Blake2State") + + +class Blake2State: + """ + Python-level wrapper for the crypto_generichash_blake2b state buffer + """ + + __slots__ = ["_statebuf", "digest_size"] + + def __init__(self, digest_size: int): + self._statebuf = ffi.new( + "unsigned char[]", crypto_generichash_STATEBYTES + ) + self.digest_size = digest_size + + def __reduce__(self) -> NoReturn: + """ + Raise the same exception as hashlib's blake implementation + on copy.copy() + """ + raise TypeError( + "can't pickle {} objects".format(self.__class__.__name__) + ) + + def copy(self: _Blake2State) -> _Blake2State: + _st = self.__class__(self.digest_size) + ffi.memmove( + _st._statebuf, self._statebuf, crypto_generichash_STATEBYTES + ) + return _st + + +def generichash_blake2b_init( + key: bytes = b"", + salt: bytes = b"", + person: bytes = b"", + digest_size: int = crypto_generichash_BYTES, +) -> Blake2State: + """ + Create a new initialized blake2b hash state + + :param key: must be at most + :py:data:`.crypto_generichash_KEYBYTES_MAX` long + :type key: bytes + :param salt: must be at most + :py:data:`.crypto_generichash_SALTBYTES` long; + will be zero-padded if needed + :type salt: bytes + :param person: must be at most + :py:data:`.crypto_generichash_PERSONALBYTES` long: + will be zero-padded if needed + :type person: bytes + :param digest_size: must be at most + :py:data:`.crypto_generichash_BYTES_MAX`; + the default digest size is + :py:data:`.crypto_generichash_BYTES` + :type digest_size: int + :return: a initialized :py:class:`.Blake2State` + :rtype: object + """ + + _checkparams(digest_size, key, salt, person) + + state = Blake2State(digest_size) + + # both _salt and _personal must be zero-padded to the correct length + _salt = ffi.new("unsigned char []", crypto_generichash_SALTBYTES) + _person = ffi.new("unsigned char []", 
crypto_generichash_PERSONALBYTES) + + ffi.memmove(_salt, salt, len(salt)) + ffi.memmove(_person, person, len(person)) + + rc = lib.crypto_generichash_blake2b_init_salt_personal( + state._statebuf, key, len(key), digest_size, _salt, _person + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + return state + + +def generichash_blake2b_update(state: Blake2State, data: bytes) -> None: + """Update the blake2b hash state + + :param state: a initialized Blake2bState object as returned from + :py:func:`.crypto_generichash_blake2b_init` + :type state: :py:class:`.Blake2State` + :param data: + :type data: bytes + """ + + ensure( + isinstance(state, Blake2State), + "State must be a Blake2State object", + raising=exc.TypeError, + ) + + ensure( + isinstance(data, bytes), + "Input data must be a bytes sequence", + raising=exc.TypeError, + ) + + rc = lib.crypto_generichash_blake2b_update( + state._statebuf, data, len(data) + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + +def generichash_blake2b_final(state: Blake2State) -> bytes: + """Finalize the blake2b hash state and return the digest. + + :param state: a initialized Blake2bState object as returned from + :py:func:`.crypto_generichash_blake2b_init` + :type state: :py:class:`.Blake2State` + :return: the blake2 digest of the passed-in data stream + :rtype: bytes + """ + + ensure( + isinstance(state, Blake2State), + "State must be a Blake2State object", + raising=exc.TypeError, + ) + + _digest = ffi.new("unsigned char[]", crypto_generichash_BYTES_MAX) + rc = lib.crypto_generichash_blake2b_final( + state._statebuf, _digest, state.digest_size + ) + + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + return ffi.buffer(_digest, state.digest_size)[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_hash.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_hash.py new file mode 100644 index 0000000..2bab399 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_hash.py @@ -0,0 +1,63 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +# crypto_hash_BYTES = lib.crypto_hash_bytes() +crypto_hash_BYTES: int = lib.crypto_hash_sha512_bytes() +crypto_hash_sha256_BYTES: int = lib.crypto_hash_sha256_bytes() +crypto_hash_sha512_BYTES: int = lib.crypto_hash_sha512_bytes() + + +def crypto_hash(message: bytes) -> bytes: + """ + Hashes and returns the message ``message``. + + :param message: bytes + :rtype: bytes + """ + digest = ffi.new("unsigned char[]", crypto_hash_BYTES) + rc = lib.crypto_hash(digest, message, len(message)) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + return ffi.buffer(digest, crypto_hash_BYTES)[:] + + +def crypto_hash_sha256(message: bytes) -> bytes: + """ + Hashes and returns the message ``message``. 
+ + :param message: bytes + :rtype: bytes + """ + digest = ffi.new("unsigned char[]", crypto_hash_sha256_BYTES) + rc = lib.crypto_hash_sha256(digest, message, len(message)) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + return ffi.buffer(digest, crypto_hash_sha256_BYTES)[:] + + +def crypto_hash_sha512(message: bytes) -> bytes: + """ + Hashes and returns the message ``message``. + + :param message: bytes + :rtype: bytes + """ + digest = ffi.new("unsigned char[]", crypto_hash_sha512_BYTES) + rc = lib.crypto_hash_sha512(digest, message, len(message)) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + return ffi.buffer(digest, crypto_hash_sha512_BYTES)[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_kx.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_kx.py new file mode 100644 index 0000000..3c649e4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_kx.py @@ -0,0 +1,200 @@ +# Copyright 2018 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Tuple + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + +__all__ = [ + "crypto_kx_keypair", + "crypto_kx_client_session_keys", + "crypto_kx_server_session_keys", + "crypto_kx_PUBLIC_KEY_BYTES", + "crypto_kx_SECRET_KEY_BYTES", + "crypto_kx_SEED_BYTES", + "crypto_kx_SESSION_KEY_BYTES", +] + +""" +Implementations of client, server key exchange +""" +crypto_kx_PUBLIC_KEY_BYTES: int = lib.crypto_kx_publickeybytes() +crypto_kx_SECRET_KEY_BYTES: int = lib.crypto_kx_secretkeybytes() +crypto_kx_SEED_BYTES: int = lib.crypto_kx_seedbytes() +crypto_kx_SESSION_KEY_BYTES: int = lib.crypto_kx_sessionkeybytes() + + +def crypto_kx_keypair() -> Tuple[bytes, bytes]: + """ + Generate a key pair. + This is a duplicate crypto_box_keypair, but + is included for api consistency. + :return: (public_key, secret_key) + :rtype: (bytes, bytes) + """ + public_key = ffi.new("unsigned char[]", crypto_kx_PUBLIC_KEY_BYTES) + secret_key = ffi.new("unsigned char[]", crypto_kx_SECRET_KEY_BYTES) + res = lib.crypto_kx_keypair(public_key, secret_key) + ensure(res == 0, "Key generation failed.", raising=exc.CryptoError) + + return ( + ffi.buffer(public_key, crypto_kx_PUBLIC_KEY_BYTES)[:], + ffi.buffer(secret_key, crypto_kx_SECRET_KEY_BYTES)[:], + ) + + +def crypto_kx_seed_keypair(seed: bytes) -> Tuple[bytes, bytes]: + """ + Generate a key pair with a given seed. + This is functionally the same as crypto_box_seed_keypair, however + it uses the blake2b hash primitive instead of sha512. + It is included mainly for api consistency when using crypto_kx. 
+ :param seed: random seed + :type seed: bytes + :return: (public_key, secret_key) + :rtype: (bytes, bytes) + """ + public_key = ffi.new("unsigned char[]", crypto_kx_PUBLIC_KEY_BYTES) + secret_key = ffi.new("unsigned char[]", crypto_kx_SECRET_KEY_BYTES) + ensure( + isinstance(seed, bytes) and len(seed) == crypto_kx_SEED_BYTES, + "Seed must be a {} byte long bytes sequence".format( + crypto_kx_SEED_BYTES + ), + raising=exc.TypeError, + ) + res = lib.crypto_kx_seed_keypair(public_key, secret_key, seed) + ensure(res == 0, "Key generation failed.", raising=exc.CryptoError) + + return ( + ffi.buffer(public_key, crypto_kx_PUBLIC_KEY_BYTES)[:], + ffi.buffer(secret_key, crypto_kx_SECRET_KEY_BYTES)[:], + ) + + +def crypto_kx_client_session_keys( + client_public_key: bytes, + client_secret_key: bytes, + server_public_key: bytes, +) -> Tuple[bytes, bytes]: + """ + Generate session keys for the client. + :param client_public_key: + :type client_public_key: bytes + :param client_secret_key: + :type client_secret_key: bytes + :param server_public_key: + :type server_public_key: bytes + :return: (rx_key, tx_key) + :rtype: (bytes, bytes) + """ + ensure( + isinstance(client_public_key, bytes) + and len(client_public_key) == crypto_kx_PUBLIC_KEY_BYTES, + "Client public key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + ensure( + isinstance(client_secret_key, bytes) + and len(client_secret_key) == crypto_kx_SECRET_KEY_BYTES, + "Client secret key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + ensure( + isinstance(server_public_key, bytes) + and len(server_public_key) == crypto_kx_PUBLIC_KEY_BYTES, + "Server public key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + + rx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES) + tx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES) + res = lib.crypto_kx_client_session_keys( + rx_key, tx_key, client_public_key, client_secret_key, server_public_key + ) + ensure( + res == 0, + "Client session key generation failed.", + raising=exc.CryptoError, + ) + + return ( + ffi.buffer(rx_key, crypto_kx_SESSION_KEY_BYTES)[:], + ffi.buffer(tx_key, crypto_kx_SESSION_KEY_BYTES)[:], + ) + + +def crypto_kx_server_session_keys( + server_public_key: bytes, + server_secret_key: bytes, + client_public_key: bytes, +) -> Tuple[bytes, bytes]: + """ + Generate session keys for the server. 
+ :param server_public_key: + :type server_public_key: bytes + :param server_secret_key: + :type server_secret_key: bytes + :param client_public_key: + :type client_public_key: bytes + :return: (rx_key, tx_key) + :rtype: (bytes, bytes) + """ + ensure( + isinstance(server_public_key, bytes) + and len(server_public_key) == crypto_kx_PUBLIC_KEY_BYTES, + "Server public key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + ensure( + isinstance(server_secret_key, bytes) + and len(server_secret_key) == crypto_kx_SECRET_KEY_BYTES, + "Server secret key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + ensure( + isinstance(client_public_key, bytes) + and len(client_public_key) == crypto_kx_PUBLIC_KEY_BYTES, + "Client public key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + + rx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES) + tx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES) + res = lib.crypto_kx_server_session_keys( + rx_key, tx_key, server_public_key, server_secret_key, client_public_key + ) + ensure( + res == 0, + "Server session key generation failed.", + raising=exc.CryptoError, + ) + + return ( + ffi.buffer(rx_key, crypto_kx_SESSION_KEY_BYTES)[:], + ffi.buffer(tx_key, crypto_kx_SESSION_KEY_BYTES)[:], + ) diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_pwhash.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_pwhash.py new file mode 100644 index 0000000..7f62360 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_pwhash.py @@ -0,0 +1,599 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
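As a quick illustration of how the key-exchange bindings above fit together, here is a minimal round-trip sketch, assuming PyNaCl is installed and `nacl.bindings.crypto_kx` is importable as shown in the hunk above; it is a usage illustration, not a definitive recipe:

# Illustrative sketch only: exercises the crypto_kx bindings shown above.
from nacl.bindings.crypto_kx import (
    crypto_kx_keypair,
    crypto_kx_client_session_keys,
    crypto_kx_server_session_keys,
)

# Each side generates a long-term key pair.
client_pk, client_sk = crypto_kx_keypair()
server_pk, server_sk = crypto_kx_keypair()

# Client derives its (rx, tx) session keys against the server's public key.
c_rx, c_tx = crypto_kx_client_session_keys(client_pk, client_sk, server_pk)

# Server derives its own (rx, tx) pair against the client's public key.
s_rx, s_tx = crypto_kx_server_session_keys(server_pk, server_sk, client_pk)

# The pairs are mirror images: what the client transmits, the server receives.
assert c_tx == s_rx and c_rx == s_tx

The crosswise equality is the whole point of the client/server split: each side gets a distinct receive key and transmit key without any further negotiation.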
+ +import sys +from typing import Tuple + +import nacl.exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +has_crypto_pwhash_scryptsalsa208sha256 = bool( + lib.PYNACL_HAS_CRYPTO_PWHASH_SCRYPTSALSA208SHA256 +) + +crypto_pwhash_scryptsalsa208sha256_STRPREFIX = b"" +crypto_pwhash_scryptsalsa208sha256_SALTBYTES = 0 +crypto_pwhash_scryptsalsa208sha256_STRBYTES = 0 +crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN = 0 +crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX = 0 +crypto_pwhash_scryptsalsa208sha256_BYTES_MIN = 0 +crypto_pwhash_scryptsalsa208sha256_BYTES_MAX = 0 +crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN = 0 +crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX = 0 +crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN = 0 +crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX = 0 +crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE = 0 +crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE = 0 +crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE = 0 +crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE = 0 + +if has_crypto_pwhash_scryptsalsa208sha256: + crypto_pwhash_scryptsalsa208sha256_STRPREFIX = ffi.string( + ffi.cast("char *", lib.crypto_pwhash_scryptsalsa208sha256_strprefix()) + )[:] + crypto_pwhash_scryptsalsa208sha256_SALTBYTES = ( + lib.crypto_pwhash_scryptsalsa208sha256_saltbytes() + ) + crypto_pwhash_scryptsalsa208sha256_STRBYTES = ( + lib.crypto_pwhash_scryptsalsa208sha256_strbytes() + ) + crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN = ( + lib.crypto_pwhash_scryptsalsa208sha256_passwd_min() + ) + crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX = ( + lib.crypto_pwhash_scryptsalsa208sha256_passwd_max() + ) + crypto_pwhash_scryptsalsa208sha256_BYTES_MIN = ( + lib.crypto_pwhash_scryptsalsa208sha256_bytes_min() + ) + crypto_pwhash_scryptsalsa208sha256_BYTES_MAX = ( + lib.crypto_pwhash_scryptsalsa208sha256_bytes_max() + ) + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN = ( + lib.crypto_pwhash_scryptsalsa208sha256_memlimit_min() + ) + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX = ( + lib.crypto_pwhash_scryptsalsa208sha256_memlimit_max() + ) + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN = ( + lib.crypto_pwhash_scryptsalsa208sha256_opslimit_min() + ) + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX = ( + lib.crypto_pwhash_scryptsalsa208sha256_opslimit_max() + ) + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE = ( + lib.crypto_pwhash_scryptsalsa208sha256_opslimit_interactive() + ) + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE = ( + lib.crypto_pwhash_scryptsalsa208sha256_memlimit_interactive() + ) + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE = ( + lib.crypto_pwhash_scryptsalsa208sha256_opslimit_sensitive() + ) + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE = ( + lib.crypto_pwhash_scryptsalsa208sha256_memlimit_sensitive() + ) + +crypto_pwhash_ALG_ARGON2I13: int = lib.crypto_pwhash_alg_argon2i13() +crypto_pwhash_ALG_ARGON2ID13: int = lib.crypto_pwhash_alg_argon2id13() +crypto_pwhash_ALG_DEFAULT: int = lib.crypto_pwhash_alg_default() + +crypto_pwhash_SALTBYTES: int = lib.crypto_pwhash_saltbytes() +crypto_pwhash_STRBYTES: int = lib.crypto_pwhash_strbytes() + +crypto_pwhash_PASSWD_MIN: int = lib.crypto_pwhash_passwd_min() +crypto_pwhash_PASSWD_MAX: int = lib.crypto_pwhash_passwd_max() +crypto_pwhash_BYTES_MIN: int = lib.crypto_pwhash_bytes_min() +crypto_pwhash_BYTES_MAX: int = lib.crypto_pwhash_bytes_max() + +crypto_pwhash_argon2i_STRPREFIX: bytes = ffi.string( + ffi.cast("char *", 
lib.crypto_pwhash_argon2i_strprefix()) +)[:] +crypto_pwhash_argon2i_MEMLIMIT_MIN: int = ( + lib.crypto_pwhash_argon2i_memlimit_min() +) +crypto_pwhash_argon2i_MEMLIMIT_MAX: int = ( + lib.crypto_pwhash_argon2i_memlimit_max() +) +crypto_pwhash_argon2i_OPSLIMIT_MIN: int = ( + lib.crypto_pwhash_argon2i_opslimit_min() +) +crypto_pwhash_argon2i_OPSLIMIT_MAX: int = ( + lib.crypto_pwhash_argon2i_opslimit_max() +) +crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE: int = ( + lib.crypto_pwhash_argon2i_opslimit_interactive() +) +crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE: int = ( + lib.crypto_pwhash_argon2i_memlimit_interactive() +) +crypto_pwhash_argon2i_OPSLIMIT_MODERATE: int = ( + lib.crypto_pwhash_argon2i_opslimit_moderate() +) +crypto_pwhash_argon2i_MEMLIMIT_MODERATE: int = ( + lib.crypto_pwhash_argon2i_memlimit_moderate() +) +crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE: int = ( + lib.crypto_pwhash_argon2i_opslimit_sensitive() +) +crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE: int = ( + lib.crypto_pwhash_argon2i_memlimit_sensitive() +) + +crypto_pwhash_argon2id_STRPREFIX: bytes = ffi.string( + ffi.cast("char *", lib.crypto_pwhash_argon2id_strprefix()) +)[:] +crypto_pwhash_argon2id_MEMLIMIT_MIN: int = ( + lib.crypto_pwhash_argon2id_memlimit_min() +) +crypto_pwhash_argon2id_MEMLIMIT_MAX: int = ( + lib.crypto_pwhash_argon2id_memlimit_max() +) +crypto_pwhash_argon2id_OPSLIMIT_MIN: int = ( + lib.crypto_pwhash_argon2id_opslimit_min() +) +crypto_pwhash_argon2id_OPSLIMIT_MAX: int = ( + lib.crypto_pwhash_argon2id_opslimit_max() +) +crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE: int = ( + lib.crypto_pwhash_argon2id_opslimit_interactive() +) +crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE: int = ( + lib.crypto_pwhash_argon2id_memlimit_interactive() +) +crypto_pwhash_argon2id_OPSLIMIT_MODERATE: int = ( + lib.crypto_pwhash_argon2id_opslimit_moderate() +) +crypto_pwhash_argon2id_MEMLIMIT_MODERATE: int = ( + lib.crypto_pwhash_argon2id_memlimit_moderate() +) +crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE: int = ( + lib.crypto_pwhash_argon2id_opslimit_sensitive() +) +crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE: int = ( + lib.crypto_pwhash_argon2id_memlimit_sensitive() +) + +SCRYPT_OPSLIMIT_INTERACTIVE = ( + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE +) +SCRYPT_MEMLIMIT_INTERACTIVE = ( + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE +) +SCRYPT_OPSLIMIT_SENSITIVE = ( + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE +) +SCRYPT_MEMLIMIT_SENSITIVE = ( + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE +) +SCRYPT_SALTBYTES = crypto_pwhash_scryptsalsa208sha256_SALTBYTES +SCRYPT_STRBYTES = crypto_pwhash_scryptsalsa208sha256_STRBYTES + +SCRYPT_PR_MAX = (1 << 30) - 1 +LOG2_UINT64_MAX = 63 +UINT64_MAX = (1 << 64) - 1 +SCRYPT_MAX_MEM = 32 * (1024 * 1024) + + +def _check_memory_occupation( + n: int, r: int, p: int, maxmem: int = SCRYPT_MAX_MEM +) -> None: + ensure(r != 0, "Invalid block size", raising=exc.ValueError) + + ensure(p != 0, "Invalid parallelization factor", raising=exc.ValueError) + + ensure( + (n & (n - 1)) == 0, + "Cost factor must be a power of 2", + raising=exc.ValueError, + ) + + ensure(n > 1, "Cost factor must be at least 2", raising=exc.ValueError) + + ensure( + p <= SCRYPT_PR_MAX / r, + "p*r is greater than {}".format(SCRYPT_PR_MAX), + raising=exc.ValueError, + ) + + ensure(n < (1 << (16 * r)), raising=exc.ValueError) + + Blen = p * 128 * r + + i = UINT64_MAX / 128 + + ensure(n + 2 <= i / r, raising=exc.ValueError) + + Vlen = 32 * r * (n + 2) * 4 + + ensure(Blen <= UINT64_MAX - Vlen, 
raising=exc.ValueError) + + ensure(Blen <= sys.maxsize - Vlen, raising=exc.ValueError) + + ensure( + Blen + Vlen <= maxmem, + "Memory limit would be exceeded with the chosen n, r, p", + raising=exc.ValueError, + ) + + +def nacl_bindings_pick_scrypt_params( + opslimit: int, memlimit: int +) -> Tuple[int, int, int]: + """Python implementation of libsodium's pickparams""" + + if opslimit < 32768: + opslimit = 32768 + + r = 8 + + if opslimit < (memlimit // 32): + p = 1 + maxn = opslimit // (4 * r) + for n_log2 in range(1, 63): # pragma: no branch + if (2**n_log2) > (maxn // 2): + break + else: + maxn = memlimit // (r * 128) + for n_log2 in range(1, 63): # pragma: no branch + if (2**n_log2) > maxn // 2: + break + + maxrp = (opslimit // 4) // (2**n_log2) + + if maxrp > 0x3FFFFFFF: # pragma: no cover + maxrp = 0x3FFFFFFF + + p = maxrp // r + + return n_log2, r, p + + +def crypto_pwhash_scryptsalsa208sha256_ll( + passwd: bytes, + salt: bytes, + n: int, + r: int, + p: int, + dklen: int = 64, + maxmem: int = SCRYPT_MAX_MEM, +) -> bytes: + """ + Derive a cryptographic key using the ``passwd`` and ``salt`` + given as input. + + The work factor can be tuned by by picking different + values for the parameters + + :param bytes passwd: + :param bytes salt: + :param bytes salt: *must* be *exactly* :py:const:`.SALTBYTES` long + :param int dklen: + :param int opslimit: + :param int n: + :param int r: block size, + :param int p: the parallelism factor + :param int maxmem: the maximum available memory available for scrypt's + operations + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_pwhash_scryptsalsa208sha256, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure(isinstance(n, int), raising=TypeError) + ensure(isinstance(r, int), raising=TypeError) + ensure(isinstance(p, int), raising=TypeError) + + ensure(isinstance(passwd, bytes), raising=TypeError) + ensure(isinstance(salt, bytes), raising=TypeError) + + _check_memory_occupation(n, r, p, maxmem) + + buf = ffi.new("uint8_t[]", dklen) + + ret = lib.crypto_pwhash_scryptsalsa208sha256_ll( + passwd, len(passwd), salt, len(salt), n, r, p, buf, dklen + ) + + ensure( + ret == 0, + "Unexpected failure in key derivation", + raising=exc.RuntimeError, + ) + + return ffi.buffer(ffi.cast("char *", buf), dklen)[:] + + +def crypto_pwhash_scryptsalsa208sha256_str( + passwd: bytes, + opslimit: int = SCRYPT_OPSLIMIT_INTERACTIVE, + memlimit: int = SCRYPT_MEMLIMIT_INTERACTIVE, +) -> bytes: + """ + Derive a cryptographic key using the ``passwd`` and ``salt`` + given as input, returning a string representation which includes + the salt and the tuning parameters. + + The returned string can be directly stored as a password hash. + + See :py:func:`.crypto_pwhash_scryptsalsa208sha256` for a short + discussion about ``opslimit`` and ``memlimit`` values. + + :param bytes passwd: + :param int opslimit: + :param int memlimit: + :return: serialized key hash, including salt and tuning parameters + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. 
+ """ + ensure( + has_crypto_pwhash_scryptsalsa208sha256, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + buf = ffi.new("char[]", SCRYPT_STRBYTES) + + ret = lib.crypto_pwhash_scryptsalsa208sha256_str( + buf, passwd, len(passwd), opslimit, memlimit + ) + + ensure( + ret == 0, + "Unexpected failure in password hashing", + raising=exc.RuntimeError, + ) + + return ffi.string(buf) + + +def crypto_pwhash_scryptsalsa208sha256_str_verify( + passwd_hash: bytes, passwd: bytes +) -> bool: + """ + Verifies the ``passwd`` against the ``passwd_hash`` that was generated. + Returns True or False depending on the success + + :param passwd_hash: bytes + :param passwd: bytes + :rtype: boolean + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_pwhash_scryptsalsa208sha256, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + len(passwd_hash) == SCRYPT_STRBYTES - 1, + "Invalid password hash", + raising=exc.ValueError, + ) + + ret = lib.crypto_pwhash_scryptsalsa208sha256_str_verify( + passwd_hash, passwd, len(passwd) + ) + ensure(ret == 0, "Wrong password", raising=exc.InvalidkeyError) + # all went well, therefore: + return True + + +def _check_argon2_limits_alg(opslimit: int, memlimit: int, alg: int) -> None: + if alg == crypto_pwhash_ALG_ARGON2I13: + if memlimit < crypto_pwhash_argon2i_MEMLIMIT_MIN: + raise exc.ValueError( + "memlimit must be at least {} bytes".format( + crypto_pwhash_argon2i_MEMLIMIT_MIN + ) + ) + elif memlimit > crypto_pwhash_argon2i_MEMLIMIT_MAX: + raise exc.ValueError( + "memlimit must be at most {} bytes".format( + crypto_pwhash_argon2i_MEMLIMIT_MAX + ) + ) + if opslimit < crypto_pwhash_argon2i_OPSLIMIT_MIN: + raise exc.ValueError( + "opslimit must be at least {}".format( + crypto_pwhash_argon2i_OPSLIMIT_MIN + ) + ) + elif opslimit > crypto_pwhash_argon2i_OPSLIMIT_MAX: + raise exc.ValueError( + "opslimit must be at most {}".format( + crypto_pwhash_argon2i_OPSLIMIT_MAX + ) + ) + + elif alg == crypto_pwhash_ALG_ARGON2ID13: + if memlimit < crypto_pwhash_argon2id_MEMLIMIT_MIN: + raise exc.ValueError( + "memlimit must be at least {} bytes".format( + crypto_pwhash_argon2id_MEMLIMIT_MIN + ) + ) + elif memlimit > crypto_pwhash_argon2id_MEMLIMIT_MAX: + raise exc.ValueError( + "memlimit must be at most {} bytes".format( + crypto_pwhash_argon2id_MEMLIMIT_MAX + ) + ) + if opslimit < crypto_pwhash_argon2id_OPSLIMIT_MIN: + raise exc.ValueError( + "opslimit must be at least {}".format( + crypto_pwhash_argon2id_OPSLIMIT_MIN + ) + ) + elif opslimit > crypto_pwhash_argon2id_OPSLIMIT_MAX: + raise exc.ValueError( + "opslimit must be at most {}".format( + crypto_pwhash_argon2id_OPSLIMIT_MAX + ) + ) + else: + raise exc.TypeError("Unsupported algorithm") + + +def crypto_pwhash_alg( + outlen: int, + passwd: bytes, + salt: bytes, + opslimit: int, + memlimit: int, + alg: int, +) -> bytes: + """ + Derive a raw cryptographic key using the ``passwd`` and the ``salt`` + given as input to the ``alg`` algorithm. 
+ + :param outlen: the length of the derived key + :type outlen: int + :param passwd: The input password + :type passwd: bytes + :param salt: + :type salt: bytes + :param opslimit: computational cost + :type opslimit: int + :param memlimit: memory cost + :type memlimit: int + :param alg: algorithm identifier + :type alg: int + :return: derived key + :rtype: bytes + """ + ensure(isinstance(outlen, int), raising=exc.TypeError) + ensure(isinstance(opslimit, int), raising=exc.TypeError) + ensure(isinstance(memlimit, int), raising=exc.TypeError) + ensure(isinstance(alg, int), raising=exc.TypeError) + ensure(isinstance(passwd, bytes), raising=exc.TypeError) + + if len(salt) != crypto_pwhash_SALTBYTES: + raise exc.ValueError( + "salt must be exactly {} bytes long".format( + crypto_pwhash_SALTBYTES + ) + ) + + if outlen < crypto_pwhash_BYTES_MIN: + raise exc.ValueError( + "derived key must be at least {} bytes long".format( + crypto_pwhash_BYTES_MIN + ) + ) + + elif outlen > crypto_pwhash_BYTES_MAX: + raise exc.ValueError( + "derived key must be at most {} bytes long".format( + crypto_pwhash_BYTES_MAX + ) + ) + + _check_argon2_limits_alg(opslimit, memlimit, alg) + + outbuf = ffi.new("unsigned char[]", outlen) + + ret = lib.crypto_pwhash( + outbuf, outlen, passwd, len(passwd), salt, opslimit, memlimit, alg + ) + + ensure( + ret == 0, + "Unexpected failure in key derivation", + raising=exc.RuntimeError, + ) + + return ffi.buffer(outbuf, outlen)[:] + + +def crypto_pwhash_str_alg( + passwd: bytes, + opslimit: int, + memlimit: int, + alg: int, +) -> bytes: + """ + Derive a cryptographic key using the ``passwd`` given as input + and a random salt, returning a string representation which + includes the salt, the tuning parameters and the used algorithm. + + :param passwd: The input password + :type passwd: bytes + :param opslimit: computational cost + :type opslimit: int + :param memlimit: memory cost + :type memlimit: int + :param alg: The algorithm to use + :type alg: int + :return: serialized derived key and parameters + :rtype: bytes + """ + ensure(isinstance(opslimit, int), raising=TypeError) + ensure(isinstance(memlimit, int), raising=TypeError) + ensure(isinstance(passwd, bytes), raising=TypeError) + + _check_argon2_limits_alg(opslimit, memlimit, alg) + + outbuf = ffi.new("char[]", 128) + + ret = lib.crypto_pwhash_str_alg( + outbuf, passwd, len(passwd), opslimit, memlimit, alg + ) + + ensure( + ret == 0, + "Unexpected failure in key derivation", + raising=exc.RuntimeError, + ) + + return ffi.string(outbuf) + + +def crypto_pwhash_str_verify(passwd_hash: bytes, passwd: bytes) -> bool: + """ + Verifies the ``passwd`` against a given password hash. 
+ + Returns True on success, raises InvalidkeyError on failure + :param passwd_hash: saved password hash + :type passwd_hash: bytes + :param passwd: password to be checked + :type passwd: bytes + :return: success + :rtype: boolean + """ + ensure(isinstance(passwd_hash, bytes), raising=TypeError) + ensure(isinstance(passwd, bytes), raising=TypeError) + ensure( + len(passwd_hash) <= 127, + "Hash must be at most 127 bytes long", + raising=exc.ValueError, + ) + + ret = lib.crypto_pwhash_str_verify(passwd_hash, passwd, len(passwd)) + + ensure(ret == 0, "Wrong password", raising=exc.InvalidkeyError) + # all went well, therefore: + return True + + +crypto_pwhash_argon2i_str_verify = crypto_pwhash_str_verify diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_scalarmult.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_scalarmult.py new file mode 100644 index 0000000..ca4a281 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_scalarmult.py @@ -0,0 +1,240 @@ +# Copyright 2013-2018 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +has_crypto_scalarmult_ed25519 = bool(lib.PYNACL_HAS_CRYPTO_SCALARMULT_ED25519) + +crypto_scalarmult_BYTES: int = lib.crypto_scalarmult_bytes() +crypto_scalarmult_SCALARBYTES: int = lib.crypto_scalarmult_scalarbytes() + +crypto_scalarmult_ed25519_BYTES = 0 +crypto_scalarmult_ed25519_SCALARBYTES = 0 + +if has_crypto_scalarmult_ed25519: + crypto_scalarmult_ed25519_BYTES = lib.crypto_scalarmult_ed25519_bytes() + crypto_scalarmult_ed25519_SCALARBYTES = ( + lib.crypto_scalarmult_ed25519_scalarbytes() + ) + + +def crypto_scalarmult_base(n: bytes) -> bytes: + """ + Computes and returns the scalar product of a standard group element and an + integer ``n``. + + :param n: bytes + :rtype: bytes + """ + q = ffi.new("unsigned char[]", crypto_scalarmult_BYTES) + + rc = lib.crypto_scalarmult_base(q, n) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_SCALARBYTES)[:] + + +def crypto_scalarmult(n: bytes, p: bytes) -> bytes: + """ + Computes and returns the scalar product of the given group element and an + integer ``n``. + + :param p: bytes + :param n: bytes + :rtype: bytes + """ + q = ffi.new("unsigned char[]", crypto_scalarmult_BYTES) + + rc = lib.crypto_scalarmult(q, n, p) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_SCALARBYTES)[:] + + +def crypto_scalarmult_ed25519_base(n: bytes) -> bytes: + """ + Computes and returns the scalar product of a standard group element and an + integer ``n`` on the edwards25519 curve. 
+ + :param n: a :py:data:`.crypto_scalarmult_ed25519_SCALARBYTES` long bytes + sequence representing a scalar + :type n: bytes + :return: a point on the edwards25519 curve, represented as a + :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_scalarmult_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(n, bytes) + and len(n) == crypto_scalarmult_ed25519_SCALARBYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + q = ffi.new("unsigned char[]", crypto_scalarmult_ed25519_BYTES) + + rc = lib.crypto_scalarmult_ed25519_base(q, n) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_ed25519_BYTES)[:] + + +def crypto_scalarmult_ed25519_base_noclamp(n: bytes) -> bytes: + """ + Computes and returns the scalar product of a standard group element and an + integer ``n`` on the edwards25519 curve. The integer ``n`` is not clamped. + + :param n: a :py:data:`.crypto_scalarmult_ed25519_SCALARBYTES` long bytes + sequence representing a scalar + :type n: bytes + :return: a point on the edwards25519 curve, represented as a + :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_scalarmult_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(n, bytes) + and len(n) == crypto_scalarmult_ed25519_SCALARBYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + q = ffi.new("unsigned char[]", crypto_scalarmult_ed25519_BYTES) + + rc = lib.crypto_scalarmult_ed25519_base_noclamp(q, n) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_ed25519_BYTES)[:] + + +def crypto_scalarmult_ed25519(n: bytes, p: bytes) -> bytes: + """ + Computes and returns the scalar product of a *clamped* integer ``n`` + and the given group element on the edwards25519 curve. + The scalar is clamped, as done in the public key generation case, + by setting to zero the bits in position [0, 1, 2, 255] and setting + to one the bit in position 254. + + :param n: a :py:data:`.crypto_scalarmult_ed25519_SCALARBYTES` long bytes + sequence representing a scalar + :type n: bytes + :param p: a :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :return: a point on the edwards25519 curve, represented as a + :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. 
+ """ + ensure( + has_crypto_scalarmult_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(n, bytes) + and len(n) == crypto_scalarmult_ed25519_SCALARBYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(p, bytes) and len(p) == crypto_scalarmult_ed25519_BYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + q = ffi.new("unsigned char[]", crypto_scalarmult_ed25519_BYTES) + + rc = lib.crypto_scalarmult_ed25519(q, n, p) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_ed25519_BYTES)[:] + + +def crypto_scalarmult_ed25519_noclamp(n: bytes, p: bytes) -> bytes: + """ + Computes and returns the scalar product of an integer ``n`` + and the given group element on the edwards25519 curve. The integer + ``n`` is not clamped. + + :param n: a :py:data:`.crypto_scalarmult_ed25519_SCALARBYTES` long bytes + sequence representing a scalar + :type n: bytes + :param p: a :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :return: a point on the edwards25519 curve, represented as a + :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_scalarmult_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(n, bytes) + and len(n) == crypto_scalarmult_ed25519_SCALARBYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(p, bytes) and len(p) == crypto_scalarmult_ed25519_BYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + q = ffi.new("unsigned char[]", crypto_scalarmult_ed25519_BYTES) + + rc = lib.crypto_scalarmult_ed25519_noclamp(q, n, p) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_ed25519_BYTES)[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_secretbox.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_secretbox.py new file mode 100644 index 0000000..d1ad113 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_secretbox.py @@ -0,0 +1,159 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +crypto_secretbox_KEYBYTES: int = lib.crypto_secretbox_keybytes() +crypto_secretbox_NONCEBYTES: int = lib.crypto_secretbox_noncebytes() +crypto_secretbox_ZEROBYTES: int = lib.crypto_secretbox_zerobytes() +crypto_secretbox_BOXZEROBYTES: int = lib.crypto_secretbox_boxzerobytes() +crypto_secretbox_MACBYTES: int = lib.crypto_secretbox_macbytes() +crypto_secretbox_MESSAGEBYTES_MAX: int = ( + lib.crypto_secretbox_messagebytes_max() +) + + +def crypto_secretbox(message: bytes, nonce: bytes, key: bytes) -> bytes: + """ + Encrypts and returns the message ``message`` with the secret ``key`` and + the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param key: bytes + :rtype: bytes + """ + if len(key) != crypto_secretbox_KEYBYTES: + raise exc.ValueError("Invalid key") + + if len(nonce) != crypto_secretbox_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + padded = b"\x00" * crypto_secretbox_ZEROBYTES + message + ciphertext = ffi.new("unsigned char[]", len(padded)) + + res = lib.crypto_secretbox(ciphertext, padded, len(padded), nonce, key) + ensure(res == 0, "Encryption failed", raising=exc.CryptoError) + + ciphertext = ffi.buffer(ciphertext, len(padded)) + return ciphertext[crypto_secretbox_BOXZEROBYTES:] + + +def crypto_secretbox_open( + ciphertext: bytes, nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt and returns the encrypted message ``ciphertext`` with the secret + ``key`` and the nonce ``nonce``. + + :param ciphertext: bytes + :param nonce: bytes + :param key: bytes + :rtype: bytes + """ + if len(key) != crypto_secretbox_KEYBYTES: + raise exc.ValueError("Invalid key") + + if len(nonce) != crypto_secretbox_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + padded = b"\x00" * crypto_secretbox_BOXZEROBYTES + ciphertext + plaintext = ffi.new("unsigned char[]", len(padded)) + + res = lib.crypto_secretbox_open(plaintext, padded, len(padded), nonce, key) + ensure( + res == 0, + "Decryption failed. Ciphertext failed verification", + raising=exc.CryptoError, + ) + + plaintext = ffi.buffer(plaintext, len(padded)) + return plaintext[crypto_secretbox_ZEROBYTES:] + + +def crypto_secretbox_easy(message: bytes, nonce: bytes, key: bytes) -> bytes: + """ + Encrypts and returns the message ``message`` with the secret ``key`` and + the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param key: bytes + :rtype: bytes + """ + if len(key) != crypto_secretbox_KEYBYTES: + raise exc.ValueError("Invalid key") + + if len(nonce) != crypto_secretbox_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + _mlen = len(message) + _clen = crypto_secretbox_MACBYTES + _mlen + + ciphertext = ffi.new("unsigned char[]", _clen) + + res = lib.crypto_secretbox_easy(ciphertext, message, _mlen, nonce, key) + ensure(res == 0, "Encryption failed", raising=exc.CryptoError) + + ciphertext = ffi.buffer(ciphertext, _clen) + return ciphertext[:] + + +def crypto_secretbox_open_easy( + ciphertext: bytes, nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt and returns the encrypted message ``ciphertext`` with the secret + ``key`` and the nonce ``nonce``. 
+ + :param ciphertext: bytes + :param nonce: bytes + :param key: bytes + :rtype: bytes + """ + if len(key) != crypto_secretbox_KEYBYTES: + raise exc.ValueError("Invalid key") + + if len(nonce) != crypto_secretbox_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + _clen = len(ciphertext) + + ensure( + _clen >= crypto_secretbox_MACBYTES, + "Input ciphertext must be at least {} long".format( + crypto_secretbox_MACBYTES + ), + raising=exc.TypeError, + ) + + _mlen = _clen - crypto_secretbox_MACBYTES + + plaintext = ffi.new("unsigned char[]", max(1, _mlen)) + + res = lib.crypto_secretbox_open_easy( + plaintext, ciphertext, _clen, nonce, key + ) + ensure( + res == 0, + "Decryption failed. Ciphertext failed verification", + raising=exc.CryptoError, + ) + + plaintext = ffi.buffer(plaintext, _mlen) + return plaintext[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_secretstream.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_secretstream.py new file mode 100644 index 0000000..59b074c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_secretstream.py @@ -0,0 +1,358 @@ +# Copyright 2013-2018 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional, Tuple, Union, cast + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +crypto_secretstream_xchacha20poly1305_ABYTES: int = ( + lib.crypto_secretstream_xchacha20poly1305_abytes() +) +crypto_secretstream_xchacha20poly1305_HEADERBYTES: int = ( + lib.crypto_secretstream_xchacha20poly1305_headerbytes() +) +crypto_secretstream_xchacha20poly1305_KEYBYTES: int = ( + lib.crypto_secretstream_xchacha20poly1305_keybytes() +) +crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX: int = ( + lib.crypto_secretstream_xchacha20poly1305_messagebytes_max() +) +crypto_secretstream_xchacha20poly1305_STATEBYTES: int = ( + lib.crypto_secretstream_xchacha20poly1305_statebytes() +) + + +crypto_secretstream_xchacha20poly1305_TAG_MESSAGE: int = ( + lib.crypto_secretstream_xchacha20poly1305_tag_message() +) +crypto_secretstream_xchacha20poly1305_TAG_PUSH: int = ( + lib.crypto_secretstream_xchacha20poly1305_tag_push() +) +crypto_secretstream_xchacha20poly1305_TAG_REKEY: int = ( + lib.crypto_secretstream_xchacha20poly1305_tag_rekey() +) +crypto_secretstream_xchacha20poly1305_TAG_FINAL: int = ( + lib.crypto_secretstream_xchacha20poly1305_tag_final() +) + + +def crypto_secretstream_xchacha20poly1305_keygen() -> bytes: + """ + Generate a key for use with + :func:`.crypto_secretstream_xchacha20poly1305_init_push`. + + """ + keybuf = ffi.new( + "unsigned char[]", + crypto_secretstream_xchacha20poly1305_KEYBYTES, + ) + lib.crypto_secretstream_xchacha20poly1305_keygen(keybuf) + return ffi.buffer(keybuf)[:] + + +class crypto_secretstream_xchacha20poly1305_state: + """ + An object wrapping the crypto_secretstream_xchacha20poly1305 state. 
+ + """ + + __slots__ = ["statebuf", "rawbuf", "tagbuf"] + + def __init__(self) -> None: + """Initialize a clean state object.""" + ByteString = Union[bytes, bytearray, memoryview] + self.statebuf: ByteString = ffi.new( + "unsigned char[]", + crypto_secretstream_xchacha20poly1305_STATEBYTES, + ) + + self.rawbuf: Optional[ByteString] = None + self.tagbuf: Optional[ByteString] = None + + +def crypto_secretstream_xchacha20poly1305_init_push( + state: crypto_secretstream_xchacha20poly1305_state, key: bytes +) -> bytes: + """ + Initialize a crypto_secretstream_xchacha20poly1305 encryption buffer. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + :param key: must be + :data:`.crypto_secretstream_xchacha20poly1305_KEYBYTES` long + :type key: bytes + :return: header + :rtype: bytes + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + ensure( + isinstance(key, bytes), + "Key must be a bytes sequence", + raising=exc.TypeError, + ) + ensure( + len(key) == crypto_secretstream_xchacha20poly1305_KEYBYTES, + "Invalid key length", + raising=exc.ValueError, + ) + + headerbuf = ffi.new( + "unsigned char []", + crypto_secretstream_xchacha20poly1305_HEADERBYTES, + ) + + rc = lib.crypto_secretstream_xchacha20poly1305_init_push( + state.statebuf, headerbuf, key + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + return ffi.buffer(headerbuf)[:] + + +def crypto_secretstream_xchacha20poly1305_push( + state: crypto_secretstream_xchacha20poly1305_state, + m: bytes, + ad: Optional[bytes] = None, + tag: int = crypto_secretstream_xchacha20poly1305_TAG_MESSAGE, +) -> bytes: + """ + Add an encrypted message to the secret stream. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + :param m: the message to encrypt, the maximum length of an individual + message is + :data:`.crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX`. + :type m: bytes + :param ad: additional data to include in the authentication tag + :type ad: bytes or None + :param tag: the message tag, usually + :data:`.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE` or + :data:`.crypto_secretstream_xchacha20poly1305_TAG_FINAL`. 
+ :type tag: int + :return: ciphertext + :rtype: bytes + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + ensure(isinstance(m, bytes), "Message is not bytes", raising=exc.TypeError) + ensure( + len(m) <= crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX, + "Message is too long", + raising=exc.ValueError, + ) + ensure( + ad is None or isinstance(ad, bytes), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + clen = len(m) + crypto_secretstream_xchacha20poly1305_ABYTES + if state.rawbuf is None or len(state.rawbuf) < clen: + state.rawbuf = ffi.new("unsigned char[]", clen) + + if ad is None: + ad = ffi.NULL + adlen = 0 + else: + adlen = len(ad) + + rc = lib.crypto_secretstream_xchacha20poly1305_push( + state.statebuf, + state.rawbuf, + ffi.NULL, + m, + len(m), + ad, + adlen, + tag, + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + return ffi.buffer(state.rawbuf, clen)[:] + + +def crypto_secretstream_xchacha20poly1305_init_pull( + state: crypto_secretstream_xchacha20poly1305_state, + header: bytes, + key: bytes, +) -> None: + """ + Initialize a crypto_secretstream_xchacha20poly1305 decryption buffer. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + :param header: must be + :data:`.crypto_secretstream_xchacha20poly1305_HEADERBYTES` long + :type header: bytes + :param key: must be + :data:`.crypto_secretstream_xchacha20poly1305_KEYBYTES` long + :type key: bytes + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + ensure( + isinstance(header, bytes), + "Header must be a bytes sequence", + raising=exc.TypeError, + ) + ensure( + len(header) == crypto_secretstream_xchacha20poly1305_HEADERBYTES, + "Invalid header length", + raising=exc.ValueError, + ) + ensure( + isinstance(key, bytes), + "Key must be a bytes sequence", + raising=exc.TypeError, + ) + ensure( + len(key) == crypto_secretstream_xchacha20poly1305_KEYBYTES, + "Invalid key length", + raising=exc.ValueError, + ) + + if state.tagbuf is None: + state.tagbuf = ffi.new("unsigned char *") + + rc = lib.crypto_secretstream_xchacha20poly1305_init_pull( + state.statebuf, header, key + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + +def crypto_secretstream_xchacha20poly1305_pull( + state: crypto_secretstream_xchacha20poly1305_state, + c: bytes, + ad: Optional[bytes] = None, +) -> Tuple[bytes, int]: + """ + Read a decrypted message from the secret stream. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + :param c: the ciphertext to decrypt, the maximum length of an individual + ciphertext is + :data:`.crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX` + + :data:`.crypto_secretstream_xchacha20poly1305_ABYTES`. 
+ :type c: bytes + :param ad: additional data to include in the authentication tag + :type ad: bytes or None + :return: (message, tag) + :rtype: (bytes, int) + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + ensure( + state.tagbuf is not None, + ( + "State must be initialized using " + "crypto_secretstream_xchacha20poly1305_init_pull" + ), + raising=exc.ValueError, + ) + ensure( + isinstance(c, bytes), + "Ciphertext is not bytes", + raising=exc.TypeError, + ) + ensure( + len(c) >= crypto_secretstream_xchacha20poly1305_ABYTES, + "Ciphertext is too short", + raising=exc.ValueError, + ) + ensure( + len(c) + <= ( + crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX + + crypto_secretstream_xchacha20poly1305_ABYTES + ), + "Ciphertext is too long", + raising=exc.ValueError, + ) + ensure( + ad is None or isinstance(ad, bytes), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + mlen = len(c) - crypto_secretstream_xchacha20poly1305_ABYTES + if state.rawbuf is None or len(state.rawbuf) < mlen: + state.rawbuf = ffi.new("unsigned char[]", mlen) + + if ad is None: + ad = ffi.NULL + adlen = 0 + else: + adlen = len(ad) + + rc = lib.crypto_secretstream_xchacha20poly1305_pull( + state.statebuf, + state.rawbuf, + ffi.NULL, + state.tagbuf, + c, + len(c), + ad, + adlen, + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + # Cast safety: we `ensure` above that `state.tagbuf is not None`. + return ( + ffi.buffer(state.rawbuf, mlen)[:], + int(cast(bytes, state.tagbuf)[0]), + ) + + +def crypto_secretstream_xchacha20poly1305_rekey( + state: crypto_secretstream_xchacha20poly1305_state, +) -> None: + """ + Explicitly change the encryption key in the stream. + + Normally the stream is re-keyed as needed or an explicit ``tag`` of + :data:`.crypto_secretstream_xchacha20poly1305_TAG_REKEY` is added to a + message to ensure forward secrecy, but this method can be used instead + if the re-keying is controlled without adding the tag. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + lib.crypto_secretstream_xchacha20poly1305_rekey(state.statebuf) diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_shorthash.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_shorthash.py new file mode 100644 index 0000000..8f7d209 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_shorthash.py @@ -0,0 +1,81 @@ +# Copyright 2016 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
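The secretstream push/pull functions above are meant to be used as a matched pair: the sender initialises a state with `init_push`, pushes one or more chunks (marking the last one with `TAG_FINAL`), and the receiver replays the ciphertexts in order through `init_pull`/`pull` with the same key and header. A minimal encrypt/decrypt round trip, assuming the `nacl.bindings.crypto_secretstream` module above is importable; it is a sketch, not a hardened streaming protocol:

# Illustrative sketch only: round-trips the secretstream bindings above.
from nacl.bindings.crypto_secretstream import (
    crypto_secretstream_xchacha20poly1305_init_pull,
    crypto_secretstream_xchacha20poly1305_init_push,
    crypto_secretstream_xchacha20poly1305_keygen,
    crypto_secretstream_xchacha20poly1305_pull,
    crypto_secretstream_xchacha20poly1305_push,
    crypto_secretstream_xchacha20poly1305_state,
    crypto_secretstream_xchacha20poly1305_TAG_FINAL,
)

key = crypto_secretstream_xchacha20poly1305_keygen()

# Sender side: one state per stream; the header must reach the receiver intact.
enc_state = crypto_secretstream_xchacha20poly1305_state()
header = crypto_secretstream_xchacha20poly1305_init_push(enc_state, key)
c1 = crypto_secretstream_xchacha20poly1305_push(enc_state, b"chunk one")
c2 = crypto_secretstream_xchacha20poly1305_push(
    enc_state,
    b"chunk two",
    tag=crypto_secretstream_xchacha20poly1305_TAG_FINAL,
)

# Receiver side: same key and header, ciphertexts pulled in the original order.
dec_state = crypto_secretstream_xchacha20poly1305_state()
crypto_secretstream_xchacha20poly1305_init_pull(dec_state, header, key)
m1, tag1 = crypto_secretstream_xchacha20poly1305_pull(dec_state, c1)
m2, tag2 = crypto_secretstream_xchacha20poly1305_pull(dec_state, c2)

assert (m1, m2) == (b"chunk one", b"chunk two")
assert tag2 == crypto_secretstream_xchacha20poly1305_TAG_FINAL

Checking the returned tag on the last pull is what tells the receiver the stream ended where the sender intended, rather than being truncated.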
+ + +import nacl.exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +has_crypto_shorthash_siphashx24 = bool( + lib.PYNACL_HAS_CRYPTO_SHORTHASH_SIPHASHX24 +) + +BYTES: int = lib.crypto_shorthash_siphash24_bytes() +KEYBYTES: int = lib.crypto_shorthash_siphash24_keybytes() + +XBYTES = 0 +XKEYBYTES = 0 + +if has_crypto_shorthash_siphashx24: + XBYTES = lib.crypto_shorthash_siphashx24_bytes() + XKEYBYTES = lib.crypto_shorthash_siphashx24_keybytes() + + +def crypto_shorthash_siphash24(data: bytes, key: bytes) -> bytes: + """Compute a fast, cryptographic quality, keyed hash of the input data + + :param data: + :type data: bytes + :param key: len(key) must be equal to + :py:data:`.KEYBYTES` (16) + :type key: bytes + """ + if len(key) != KEYBYTES: + raise exc.ValueError( + "Key length must be exactly {} bytes".format(KEYBYTES) + ) + digest = ffi.new("unsigned char[]", BYTES) + rc = lib.crypto_shorthash_siphash24(digest, data, len(data), key) + + ensure(rc == 0, raising=exc.RuntimeError) + return ffi.buffer(digest, BYTES)[:] + + +def crypto_shorthash_siphashx24(data: bytes, key: bytes) -> bytes: + """Compute a fast, cryptographic quality, keyed hash of the input data + + :param data: + :type data: bytes + :param key: len(key) must be equal to + :py:data:`.XKEYBYTES` (16) + :type key: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_shorthash_siphashx24, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + if len(key) != XKEYBYTES: + raise exc.ValueError( + "Key length must be exactly {} bytes".format(XKEYBYTES) + ) + digest = ffi.new("unsigned char[]", XBYTES) + rc = lib.crypto_shorthash_siphashx24(digest, data, len(data), key) + + ensure(rc == 0, raising=exc.RuntimeError) + return ffi.buffer(digest, XBYTES)[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_sign.py b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_sign.py new file mode 100644 index 0000000..f459f6a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/crypto_sign.py @@ -0,0 +1,327 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Tuple + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +crypto_sign_BYTES: int = lib.crypto_sign_bytes() +# crypto_sign_SEEDBYTES = lib.crypto_sign_seedbytes() +crypto_sign_SEEDBYTES: int = lib.crypto_sign_secretkeybytes() // 2 +crypto_sign_PUBLICKEYBYTES: int = lib.crypto_sign_publickeybytes() +crypto_sign_SECRETKEYBYTES: int = lib.crypto_sign_secretkeybytes() + +crypto_sign_curve25519_BYTES: int = lib.crypto_box_secretkeybytes() + +crypto_sign_ed25519ph_STATEBYTES: int = lib.crypto_sign_ed25519ph_statebytes() + + +def crypto_sign_keypair() -> Tuple[bytes, bytes]: + """ + Returns a randomly generated public key and secret key. 
+ + :rtype: (bytes(public_key), bytes(secret_key)) + """ + pk = ffi.new("unsigned char[]", crypto_sign_PUBLICKEYBYTES) + sk = ffi.new("unsigned char[]", crypto_sign_SECRETKEYBYTES) + + rc = lib.crypto_sign_keypair(pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ( + ffi.buffer(pk, crypto_sign_PUBLICKEYBYTES)[:], + ffi.buffer(sk, crypto_sign_SECRETKEYBYTES)[:], + ) + + +def crypto_sign_seed_keypair(seed: bytes) -> Tuple[bytes, bytes]: + """ + Computes and returns the public key and secret key using the seed ``seed``. + + :param seed: bytes + :rtype: (bytes(public_key), bytes(secret_key)) + """ + if len(seed) != crypto_sign_SEEDBYTES: + raise exc.ValueError("Invalid seed") + + pk = ffi.new("unsigned char[]", crypto_sign_PUBLICKEYBYTES) + sk = ffi.new("unsigned char[]", crypto_sign_SECRETKEYBYTES) + + rc = lib.crypto_sign_seed_keypair(pk, sk, seed) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ( + ffi.buffer(pk, crypto_sign_PUBLICKEYBYTES)[:], + ffi.buffer(sk, crypto_sign_SECRETKEYBYTES)[:], + ) + + +def crypto_sign(message: bytes, sk: bytes) -> bytes: + """ + Signs the message ``message`` using the secret key ``sk`` and returns the + signed message. + + :param message: bytes + :param sk: bytes + :rtype: bytes + """ + signed = ffi.new("unsigned char[]", len(message) + crypto_sign_BYTES) + signed_len = ffi.new("unsigned long long *") + + rc = lib.crypto_sign(signed, signed_len, message, len(message), sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(signed, signed_len[0])[:] + + +def crypto_sign_open(signed: bytes, pk: bytes) -> bytes: + """ + Verifies the signature of the signed message ``signed`` using the public + key ``pk`` and returns the unsigned message. + + :param signed: bytes + :param pk: bytes + :rtype: bytes + """ + message = ffi.new("unsigned char[]", len(signed)) + message_len = ffi.new("unsigned long long *") + + if ( + lib.crypto_sign_open(message, message_len, signed, len(signed), pk) + != 0 + ): + raise exc.BadSignatureError("Signature was forged or corrupt") + + return ffi.buffer(message, message_len[0])[:] + + +def crypto_sign_ed25519_pk_to_curve25519(public_key_bytes: bytes) -> bytes: + """ + Converts a public Ed25519 key (encoded as bytes ``public_key_bytes``) to + a public Curve25519 key as bytes. + + Raises a ValueError if ``public_key_bytes`` is not of length + ``crypto_sign_PUBLICKEYBYTES`` + + :param public_key_bytes: bytes + :rtype: bytes + """ + if len(public_key_bytes) != crypto_sign_PUBLICKEYBYTES: + raise exc.ValueError("Invalid curve public key") + + curve_public_key_len = crypto_sign_curve25519_BYTES + curve_public_key = ffi.new("unsigned char[]", curve_public_key_len) + + rc = lib.crypto_sign_ed25519_pk_to_curve25519( + curve_public_key, public_key_bytes + ) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(curve_public_key, curve_public_key_len)[:] + + +def crypto_sign_ed25519_sk_to_curve25519(secret_key_bytes: bytes) -> bytes: + """ + Converts a secret Ed25519 key (encoded as bytes ``secret_key_bytes``) to + a secret Curve25519 key as bytes. 
+ + Raises a ValueError if ``secret_key_bytes``is not of length + ``crypto_sign_SECRETKEYBYTES`` + + :param secret_key_bytes: bytes + :rtype: bytes + """ + if len(secret_key_bytes) != crypto_sign_SECRETKEYBYTES: + raise exc.ValueError("Invalid curve secret key") + + curve_secret_key_len = crypto_sign_curve25519_BYTES + curve_secret_key = ffi.new("unsigned char[]", curve_secret_key_len) + + rc = lib.crypto_sign_ed25519_sk_to_curve25519( + curve_secret_key, secret_key_bytes + ) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(curve_secret_key, curve_secret_key_len)[:] + + +def crypto_sign_ed25519_sk_to_pk(secret_key_bytes: bytes) -> bytes: + """ + Extract the public Ed25519 key from a secret Ed25519 key (encoded + as bytes ``secret_key_bytes``). + + Raises a ValueError if ``secret_key_bytes``is not of length + ``crypto_sign_SECRETKEYBYTES`` + + :param secret_key_bytes: bytes + :rtype: bytes + """ + if len(secret_key_bytes) != crypto_sign_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + return secret_key_bytes[crypto_sign_SEEDBYTES:] + + +def crypto_sign_ed25519_sk_to_seed(secret_key_bytes: bytes) -> bytes: + """ + Extract the seed from a secret Ed25519 key (encoded + as bytes ``secret_key_bytes``). + + Raises a ValueError if ``secret_key_bytes``is not of length + ``crypto_sign_SECRETKEYBYTES`` + + :param secret_key_bytes: bytes + :rtype: bytes + """ + if len(secret_key_bytes) != crypto_sign_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + return secret_key_bytes[:crypto_sign_SEEDBYTES] + + +class crypto_sign_ed25519ph_state: + """ + State object wrapping the sha-512 state used in ed25519ph computation + """ + + __slots__ = ["state"] + + def __init__(self) -> None: + self.state: bytes = ffi.new( + "unsigned char[]", crypto_sign_ed25519ph_STATEBYTES + ) + + rc = lib.crypto_sign_ed25519ph_init(self.state) + + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + +def crypto_sign_ed25519ph_update( + edph: crypto_sign_ed25519ph_state, pmsg: bytes +) -> None: + """ + Update the hash state wrapped in edph + + :param edph: the ed25519ph state being updated + :type edph: crypto_sign_ed25519ph_state + :param pmsg: the partial message + :type pmsg: bytes + :rtype: None + """ + ensure( + isinstance(edph, crypto_sign_ed25519ph_state), + "edph parameter must be a ed25519ph_state object", + raising=exc.TypeError, + ) + ensure( + isinstance(pmsg, bytes), + "pmsg parameter must be a bytes object", + raising=exc.TypeError, + ) + rc = lib.crypto_sign_ed25519ph_update(edph.state, pmsg, len(pmsg)) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + +def crypto_sign_ed25519ph_final_create( + edph: crypto_sign_ed25519ph_state, sk: bytes +) -> bytes: + """ + Create a signature for the data hashed in edph + using the secret key sk + + :param edph: the ed25519ph state for the data + being signed + :type edph: crypto_sign_ed25519ph_state + :param sk: the ed25519 secret key (secret and public part) + :type sk: bytes + :return: ed25519ph signature + :rtype: bytes + """ + ensure( + isinstance(edph, crypto_sign_ed25519ph_state), + "edph parameter must be a ed25519ph_state object", + raising=exc.TypeError, + ) + ensure( + isinstance(sk, bytes), + "secret key parameter must be a bytes object", + raising=exc.TypeError, + ) + ensure( + len(sk) == crypto_sign_SECRETKEYBYTES, + ("secret key must be {} bytes long").format( + crypto_sign_SECRETKEYBYTES + ), + raising=exc.TypeError, + ) + signature = 
ffi.new("unsigned char[]", crypto_sign_BYTES) + rc = lib.crypto_sign_ed25519ph_final_create( + edph.state, signature, ffi.NULL, sk + ) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(signature, crypto_sign_BYTES)[:] + + +def crypto_sign_ed25519ph_final_verify( + edph: crypto_sign_ed25519ph_state, signature: bytes, pk: bytes +) -> bool: + """ + Verify a prehashed signature using the public key pk + + :param edph: the ed25519ph state for the data + being verified + :type edph: crypto_sign_ed25519ph_state + :param signature: the signature being verified + :type signature: bytes + :param pk: the ed25519 public part of the signing key + :type pk: bytes + :return: True if the signature is valid + :rtype: boolean + :raises exc.BadSignatureError: if the signature is not valid + """ + ensure( + isinstance(edph, crypto_sign_ed25519ph_state), + "edph parameter must be a ed25519ph_state object", + raising=exc.TypeError, + ) + ensure( + isinstance(signature, bytes), + "signature parameter must be a bytes object", + raising=exc.TypeError, + ) + ensure( + len(signature) == crypto_sign_BYTES, + ("signature must be {} bytes long").format(crypto_sign_BYTES), + raising=exc.TypeError, + ) + ensure( + isinstance(pk, bytes), + "public key parameter must be a bytes object", + raising=exc.TypeError, + ) + ensure( + len(pk) == crypto_sign_PUBLICKEYBYTES, + ("public key must be {} bytes long").format( + crypto_sign_PUBLICKEYBYTES + ), + raising=exc.TypeError, + ) + rc = lib.crypto_sign_ed25519ph_final_verify(edph.state, signature, pk) + if rc != 0: + raise exc.BadSignatureError("Signature was forged or corrupt") + + return True diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/randombytes.py b/.venv/lib/python3.9/site-packages/nacl/bindings/randombytes.py new file mode 100644 index 0000000..ed76deb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/randombytes.py @@ -0,0 +1,51 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib + +randombytes_SEEDBYTES: int = lib.randombytes_seedbytes() + + +def randombytes(size: int) -> bytes: + """ + Returns ``size`` number of random bytes from a cryptographically secure + random source. 
+ + :param size: int + :rtype: bytes + """ + buf = ffi.new("unsigned char[]", size) + lib.randombytes(buf, size) + return ffi.buffer(buf, size)[:] + + +def randombytes_buf_deterministic(size: int, seed: bytes) -> bytes: + """ + Returns ``size`` number of deterministically generated pseudorandom bytes + from a seed + + :param size: int + :param seed: bytes + :rtype: bytes + """ + if len(seed) != randombytes_SEEDBYTES: + raise exc.TypeError( + "Deterministic random bytes must be generated from 32 bytes" + ) + + buf = ffi.new("unsigned char[]", size) + lib.randombytes_buf_deterministic(buf, size, seed) + return ffi.buffer(buf, size)[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/sodium_core.py b/.venv/lib/python3.9/site-packages/nacl/bindings/sodium_core.py new file mode 100644 index 0000000..7ebb84c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/sodium_core.py @@ -0,0 +1,33 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +def _sodium_init() -> None: + ensure( + lib.sodium_init() != -1, + "Could not initialize sodium", + raising=exc.RuntimeError, + ) + + +def sodium_init() -> None: + """ + Initializes sodium, picking the best implementations available for this + machine. + """ + ffi.init_once(_sodium_init, "libsodium") diff --git a/.venv/lib/python3.9/site-packages/nacl/bindings/utils.py b/.venv/lib/python3.9/site-packages/nacl/bindings/utils.py new file mode 100644 index 0000000..0ff22e3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/bindings/utils.py @@ -0,0 +1,141 @@ +# Copyright 2013-2017 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
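A minimal sketch of how the low-level signing and random-byte bindings shown above are typically exercised (illustrative only, not part of the vendored sources; it assumes the bundled libsodium loads and initializes normally):

from nacl.bindings import (
    crypto_sign_keypair,
    crypto_sign,
    crypto_sign_open,
    randombytes,
)

# Generate an Ed25519 key pair, sign a message, and check that it round-trips.
pk, sk = crypto_sign_keypair()
signed = crypto_sign(b"hello", sk)          # 64-byte signature prepended to the message
assert crypto_sign_open(signed, pk) == b"hello"

# Draw 24 bytes from the cryptographically secure random source.
nonce = randombytes(24)
assert len(nonce) == 24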
+ +import nacl.exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +def sodium_memcmp(inp1: bytes, inp2: bytes) -> bool: + """ + Compare contents of two memory regions in constant time + """ + ensure(isinstance(inp1, bytes), raising=exc.TypeError) + ensure(isinstance(inp2, bytes), raising=exc.TypeError) + + ln = max(len(inp1), len(inp2)) + + buf1 = ffi.new("char []", ln) + buf2 = ffi.new("char []", ln) + + ffi.memmove(buf1, inp1, len(inp1)) + ffi.memmove(buf2, inp2, len(inp2)) + + eqL = len(inp1) == len(inp2) + eqC = lib.sodium_memcmp(buf1, buf2, ln) == 0 + + return eqL and eqC + + +def sodium_pad(s: bytes, blocksize: int) -> bytes: + """ + Pad the input bytearray ``s`` to a multiple of ``blocksize`` + using the ISO/IEC 7816-4 algorithm + + :param s: input bytes string + :type s: bytes + :param blocksize: + :type blocksize: int + :return: padded string + :rtype: bytes + """ + ensure(isinstance(s, bytes), raising=exc.TypeError) + ensure(isinstance(blocksize, int), raising=exc.TypeError) + if blocksize <= 0: + raise exc.ValueError + s_len = len(s) + m_len = s_len + blocksize + buf = ffi.new("unsigned char []", m_len) + p_len = ffi.new("size_t []", 1) + ffi.memmove(buf, s, s_len) + rc = lib.sodium_pad(p_len, buf, s_len, blocksize, m_len) + ensure(rc == 0, "Padding failure", raising=exc.CryptoError) + return ffi.buffer(buf, p_len[0])[:] + + +def sodium_unpad(s: bytes, blocksize: int) -> bytes: + """ + Remove ISO/IEC 7816-4 padding from the input byte array ``s`` + + :param s: input bytes string + :type s: bytes + :param blocksize: + :type blocksize: int + :return: unpadded string + :rtype: bytes + """ + ensure(isinstance(s, bytes), raising=exc.TypeError) + ensure(isinstance(blocksize, int), raising=exc.TypeError) + s_len = len(s) + u_len = ffi.new("size_t []", 1) + rc = lib.sodium_unpad(u_len, s, s_len, blocksize) + if rc != 0: + raise exc.CryptoError("Unpadding failure") + return s[: u_len[0]] + + +def sodium_increment(inp: bytes) -> bytes: + """ + Increment the value of a byte-sequence interpreted + as the little-endian representation of a unsigned big integer. + + :param inp: input bytes buffer + :type inp: bytes + :return: a byte-sequence representing, as a little-endian + unsigned big integer, the value ``to_int(inp)`` + incremented by one. + :rtype: bytes + + """ + ensure(isinstance(inp, bytes), raising=exc.TypeError) + + ln = len(inp) + buf = ffi.new("unsigned char []", ln) + + ffi.memmove(buf, inp, ln) + + lib.sodium_increment(buf, ln) + + return ffi.buffer(buf, ln)[:] + + +def sodium_add(a: bytes, b: bytes) -> bytes: + """ + Given a couple of *same-sized* byte sequences, interpreted as the + little-endian representation of two unsigned integers, compute + the modular addition of the represented values, in constant time for + a given common length of the byte sequences. 
+ + :param a: input bytes buffer + :type a: bytes + :param b: input bytes buffer + :type b: bytes + :return: a byte-sequence representing, as a little-endian big integer, + the integer value of ``(to_int(a) + to_int(b)) mod 2^(8*len(a))`` + :rtype: bytes + """ + ensure(isinstance(a, bytes), raising=exc.TypeError) + ensure(isinstance(b, bytes), raising=exc.TypeError) + ln = len(a) + ensure(len(b) == ln, raising=exc.TypeError) + + buf_a = ffi.new("unsigned char []", ln) + buf_b = ffi.new("unsigned char []", ln) + + ffi.memmove(buf_a, a, ln) + ffi.memmove(buf_b, b, ln) + + lib.sodium_add(buf_a, buf_b, ln) + + return ffi.buffer(buf_a, ln)[:] diff --git a/.venv/lib/python3.9/site-packages/nacl/encoding.py b/.venv/lib/python3.9/site-packages/nacl/encoding.py new file mode 100644 index 0000000..6740cfb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/encoding.py @@ -0,0 +1,105 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import base64 +import binascii +from abc import ABCMeta, abstractmethod +from typing import SupportsBytes, Type + + +# TODO: when the minimum supported version of Python is 3.8, we can import +# Protocol from typing, and replace Encoder with a Protocol instead. +class _Encoder(metaclass=ABCMeta): + @staticmethod + @abstractmethod + def encode(data: bytes) -> bytes: + """Transform raw data to encoded data.""" + + @staticmethod + @abstractmethod + def decode(data: bytes) -> bytes: + """Transform encoded data back to raw data. + + Decoding after encoding should be a no-op, i.e. `decode(encode(x)) == x`. + """ + + +# Functions that use encoders are passed a subclass of _Encoder, not an instance +# (because the methods are all static). Let's gloss over that detail by defining +# an alias for Type[_Encoder]. 
+Encoder = Type[_Encoder] + + +class RawEncoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return data + + @staticmethod + def decode(data: bytes) -> bytes: + return data + + +class HexEncoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return binascii.hexlify(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return binascii.unhexlify(data) + + +class Base16Encoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return base64.b16encode(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return base64.b16decode(data) + + +class Base32Encoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return base64.b32encode(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return base64.b32decode(data) + + +class Base64Encoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return base64.b64encode(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return base64.b64decode(data) + + +class URLSafeBase64Encoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return base64.urlsafe_b64encode(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return base64.urlsafe_b64decode(data) + + +class Encodable: + def encode(self: SupportsBytes, encoder: Encoder = RawEncoder) -> bytes: + return encoder.encode(bytes(self)) diff --git a/.venv/lib/python3.9/site-packages/nacl/exceptions.py b/.venv/lib/python3.9/site-packages/nacl/exceptions.py new file mode 100644 index 0000000..40b1635 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/exceptions.py @@ -0,0 +1,88 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# We create a clone of various builtin Exception types which additionally +# inherit from CryptoError. Below, we refer to the parent types via the +# `builtins` namespace, so mypy can distinguish between (e.g.) +# `nacl.exceptions.RuntimeError` and `builtins.RuntimeError`. +import builtins +from typing import Type + + +class CryptoError(Exception): + """ + Base exception for all nacl related errors + """ + + +class BadSignatureError(CryptoError): + """ + Raised when the signature was forged or otherwise corrupt. + """ + + +class RuntimeError(builtins.RuntimeError, CryptoError): + pass + + +class AssertionError(builtins.AssertionError, CryptoError): + pass + + +class TypeError(builtins.TypeError, CryptoError): + pass + + +class ValueError(builtins.ValueError, CryptoError): + pass + + +class InvalidkeyError(CryptoError): + pass + + +class CryptPrefixError(InvalidkeyError): + pass + + +class UnavailableError(RuntimeError): + """ + is a subclass of :class:`~nacl.exceptions.RuntimeError`, raised when + trying to call functions not available in a minimal build of + libsodium or due to hardware limitations. 
+ """ + + pass + + +def ensure(cond: bool, *args: object, **kwds: Type[Exception]) -> None: + """ + Return if a condition is true, otherwise raise a caller-configurable + :py:class:`Exception` + :param bool cond: the condition to be checked + :param sequence args: the arguments to be passed to the exception's + constructor + The only accepted named parameter is `raising` used to configure the + exception to be raised if `cond` is not `True` + """ + _CHK_UNEXP = "check_condition() got an unexpected keyword argument {0}" + + raising = kwds.pop("raising", AssertionError) + if kwds: + raise TypeError(_CHK_UNEXP.format(repr(kwds.popitem()[0]))) + + if cond is True: + return + raise raising(*args) diff --git a/.venv/lib/python3.9/site-packages/nacl/hash.py b/.venv/lib/python3.9/site-packages/nacl/hash.py new file mode 100644 index 0000000..9f81590 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/hash.py @@ -0,0 +1,181 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +The :mod:`nacl.hash` module exposes one-shot interfaces +for libsodium selected hash primitives and the constants needed +for their usage. +""" + +import nacl.bindings +import nacl.encoding + + +BLAKE2B_BYTES = nacl.bindings.crypto_generichash_BYTES +"""Default digest size for :func:`blake2b` hash""" +BLAKE2B_BYTES_MIN = nacl.bindings.crypto_generichash_BYTES_MIN +"""Minimum allowed digest size for :func:`blake2b` hash""" +BLAKE2B_BYTES_MAX = nacl.bindings.crypto_generichash_BYTES_MAX +"""Maximum allowed digest size for :func:`blake2b` hash""" +BLAKE2B_KEYBYTES = nacl.bindings.crypto_generichash_KEYBYTES +"""Default size of the ``key`` byte array for :func:`blake2b` hash""" +BLAKE2B_KEYBYTES_MIN = nacl.bindings.crypto_generichash_KEYBYTES_MIN +"""Minimum allowed size of the ``key`` byte array for :func:`blake2b` hash""" +BLAKE2B_KEYBYTES_MAX = nacl.bindings.crypto_generichash_KEYBYTES_MAX +"""Maximum allowed size of the ``key`` byte array for :func:`blake2b` hash""" +BLAKE2B_SALTBYTES = nacl.bindings.crypto_generichash_SALTBYTES +"""Maximum allowed length of the ``salt`` byte array for +:func:`blake2b` hash""" +BLAKE2B_PERSONALBYTES = nacl.bindings.crypto_generichash_PERSONALBYTES +"""Maximum allowed length of the ``personalization`` +byte array for :func:`blake2b` hash""" + +SIPHASH_BYTES = nacl.bindings.crypto_shorthash_siphash24_BYTES +"""Size of the :func:`siphash24` digest""" +SIPHASH_KEYBYTES = nacl.bindings.crypto_shorthash_siphash24_KEYBYTES +"""Size of the secret ``key`` used by the :func:`siphash24` MAC""" + +SIPHASHX_AVAILABLE = nacl.bindings.has_crypto_shorthash_siphashx24 +"""``True`` if :func:`siphashx24` is available to be called""" + +SIPHASHX_BYTES = nacl.bindings.crypto_shorthash_siphashx24_BYTES +"""Size of the :func:`siphashx24` digest""" +SIPHASHX_KEYBYTES = nacl.bindings.crypto_shorthash_siphashx24_KEYBYTES +"""Size of the secret ``key`` used by the :func:`siphashx24` MAC""" + +_b2b_hash = 
nacl.bindings.crypto_generichash_blake2b_salt_personal +_sip_hash = nacl.bindings.crypto_shorthash_siphash24 +_sip_hashx = nacl.bindings.crypto_shorthash_siphashx24 + + +def sha256( + message: bytes, encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder +) -> bytes: + """ + Hashes ``message`` with SHA256. + + :param message: The message to hash. + :type message: bytes + :param encoder: A class that is able to encode the hashed message. + :returns: The hashed message. + :rtype: bytes + """ + return encoder.encode(nacl.bindings.crypto_hash_sha256(message)) + + +def sha512( + message: bytes, encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder +) -> bytes: + """ + Hashes ``message`` with SHA512. + + :param message: The message to hash. + :type message: bytes + :param encoder: A class that is able to encode the hashed message. + :returns: The hashed message. + :rtype: bytes + """ + return encoder.encode(nacl.bindings.crypto_hash_sha512(message)) + + +def blake2b( + data: bytes, + digest_size: int = BLAKE2B_BYTES, + key: bytes = b"", + salt: bytes = b"", + person: bytes = b"", + encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder, +) -> bytes: + """ + Hashes ``data`` with blake2b. + + :param data: the digest input byte sequence + :type data: bytes + :param digest_size: the requested digest size; must be at most + :const:`BLAKE2B_BYTES_MAX`; + the default digest size is + :const:`BLAKE2B_BYTES` + :type digest_size: int + :param key: the key to be set for keyed MAC/PRF usage; if set, the key + must be at most :data:`~nacl.hash.BLAKE2B_KEYBYTES_MAX` long + :type key: bytes + :param salt: an initialization salt at most + :const:`BLAKE2B_SALTBYTES` long; + it will be zero-padded if needed + :type salt: bytes + :param person: a personalization string at most + :const:`BLAKE2B_PERSONALBYTES` long; + it will be zero-padded if needed + :type person: bytes + :param encoder: the encoder to use on returned digest + :type encoder: class + :returns: The hashed message. + :rtype: bytes + """ + + digest = _b2b_hash( + data, digest_size=digest_size, key=key, salt=salt, person=person + ) + return encoder.encode(digest) + + +generichash = blake2b + + +def siphash24( + message: bytes, + key: bytes = b"", + encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder, +) -> bytes: + """ + Computes a keyed MAC of ``message`` using the short-input-optimized + siphash-2-4 construction. + + :param message: The message to hash. + :type message: bytes + :param key: the message authentication key for the siphash MAC construct + :type key: bytes(:const:`SIPHASH_KEYBYTES`) + :param encoder: A class that is able to encode the hashed message. + :returns: The hashed message. + :rtype: bytes(:const:`SIPHASH_BYTES`) + """ + digest = _sip_hash(message, key) + return encoder.encode(digest) + + +shorthash = siphash24 + + +def siphashx24( + message: bytes, + key: bytes = b"", + encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder, +) -> bytes: + """ + Computes a keyed MAC of ``message`` using the 128 bit variant of the + siphash-2-4 construction. + + :param message: The message to hash. + :type message: bytes + :param key: the message authentication key for the siphash MAC construct + :type key: bytes(:const:`SIPHASHX_KEYBYTES`) + :param encoder: A class that is able to encode the hashed message. + :returns: The hashed message. + :rtype: bytes(:const:`SIPHASHX_BYTES`) + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + .. 
versionadded:: 1.2 + """ + digest = _sip_hashx(message, key) + return encoder.encode(digest) diff --git a/.venv/lib/python3.9/site-packages/nacl/hashlib.py b/.venv/lib/python3.9/site-packages/nacl/hashlib.py new file mode 100644 index 0000000..9d289da --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/hashlib.py @@ -0,0 +1,143 @@ +# Copyright 2016-2019 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import binascii +from typing import NoReturn + +import nacl.bindings +from nacl.utils import bytes_as_string + +BYTES = nacl.bindings.crypto_generichash_BYTES +BYTES_MIN = nacl.bindings.crypto_generichash_BYTES_MIN +BYTES_MAX = nacl.bindings.crypto_generichash_BYTES_MAX +KEYBYTES = nacl.bindings.crypto_generichash_KEYBYTES +KEYBYTES_MIN = nacl.bindings.crypto_generichash_KEYBYTES_MIN +KEYBYTES_MAX = nacl.bindings.crypto_generichash_KEYBYTES_MAX +SALTBYTES = nacl.bindings.crypto_generichash_SALTBYTES +PERSONALBYTES = nacl.bindings.crypto_generichash_PERSONALBYTES + +SCRYPT_AVAILABLE = nacl.bindings.has_crypto_pwhash_scryptsalsa208sha256 + +_b2b_init = nacl.bindings.crypto_generichash_blake2b_init +_b2b_final = nacl.bindings.crypto_generichash_blake2b_final +_b2b_update = nacl.bindings.crypto_generichash_blake2b_update + + +class blake2b: + """ + :py:mod:`hashlib` API compatible blake2b algorithm implementation + """ + + MAX_DIGEST_SIZE = BYTES + MAX_KEY_SIZE = KEYBYTES_MAX + PERSON_SIZE = PERSONALBYTES + SALT_SIZE = SALTBYTES + + def __init__( + self, + data: bytes = b"", + digest_size: int = BYTES, + key: bytes = b"", + salt: bytes = b"", + person: bytes = b"", + ): + """ + :py:class:`.blake2b` algorithm initializer + + :param data: + :type data: bytes + :param int digest_size: the requested digest size; must be + at most :py:attr:`.MAX_DIGEST_SIZE`; + the default digest size is :py:data:`.BYTES` + :param key: the key to be set for keyed MAC/PRF usage; if set, + the key must be at most :py:data:`.KEYBYTES_MAX` long + :type key: bytes + :param salt: a initialization salt at most + :py:attr:`.SALT_SIZE` long; it will be zero-padded + if needed + :type salt: bytes + :param person: a personalization string at most + :py:attr:`.PERSONAL_SIZE` long; it will be zero-padded + if needed + :type person: bytes + """ + + self._state = _b2b_init( + key=key, salt=salt, person=person, digest_size=digest_size + ) + self._digest_size = digest_size + + if data: + self.update(data) + + @property + def digest_size(self) -> int: + return self._digest_size + + @property + def block_size(self) -> int: + return 128 + + @property + def name(self) -> str: + return "blake2b" + + def update(self, data: bytes) -> None: + _b2b_update(self._state, data) + + def digest(self) -> bytes: + _st = self._state.copy() + return _b2b_final(_st) + + def hexdigest(self) -> str: + return bytes_as_string(binascii.hexlify(self.digest())) + + def copy(self) -> "blake2b": + _cp = type(self)(digest_size=self.digest_size) + _st = self._state.copy() + _cp._state = _st + return _cp + + def 
__reduce__(self) -> NoReturn: + """ + Raise the same exception as hashlib's blake implementation + on copy.copy() + """ + raise TypeError( + "can't pickle {} objects".format(self.__class__.__name__) + ) + + +def scrypt( + password: bytes, + salt: bytes = b"", + n: int = 2**20, + r: int = 8, + p: int = 1, + maxmem: int = 2**25, + dklen: int = 64, +) -> bytes: + """ + Derive a cryptographic key using the scrypt KDF. + + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + Implements the same signature as the ``hashlib.scrypt`` implemented + in cpython version 3.6 + """ + return nacl.bindings.crypto_pwhash_scryptsalsa208sha256_ll( + password, salt, n, r, p, maxmem=maxmem, dklen=dklen + ) diff --git a/.venv/lib/python3.9/site-packages/nacl/public.py b/.venv/lib/python3.9/site-packages/nacl/public.py new file mode 100644 index 0000000..a6fc958 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/public.py @@ -0,0 +1,421 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import ClassVar, Generic, Optional, Type, TypeVar + +import nacl.bindings +from nacl import encoding +from nacl import exceptions as exc +from nacl.encoding import Encoder +from nacl.utils import EncryptedMessage, StringFixer, random + + +class PublicKey(encoding.Encodable, StringFixer): + """ + The public key counterpart to an Curve25519 :class:`nacl.public.PrivateKey` + for encrypting messages. + + :param public_key: [:class:`bytes`] Encoded Curve25519 public key + :param encoder: A class that is able to decode the `public_key` + + :cvar SIZE: The size that the public key is required to be + """ + + SIZE: ClassVar[int] = nacl.bindings.crypto_box_PUBLICKEYBYTES + + def __init__( + self, + public_key: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ): + self._public_key = encoder.decode(public_key) + if not isinstance(self._public_key, bytes): + raise exc.TypeError("PublicKey must be created from 32 bytes") + + if len(self._public_key) != self.SIZE: + raise exc.ValueError( + "The public key must be exactly {} bytes long".format( + self.SIZE + ) + ) + + def __bytes__(self) -> bytes: + return self._public_key + + def __hash__(self) -> int: + return hash(bytes(self)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return False + return nacl.bindings.sodium_memcmp(bytes(self), bytes(other)) + + def __ne__(self, other: object) -> bool: + return not (self == other) + + +class PrivateKey(encoding.Encodable, StringFixer): + """ + Private key for decrypting messages using the Curve25519 algorithm. + + .. warning:: This **must** be protected and remain secret. 
Anyone who + knows the value of your :class:`~nacl.public.PrivateKey` can decrypt + any message encrypted by the corresponding + :class:`~nacl.public.PublicKey` + + :param private_key: The private key used to decrypt messages + :param encoder: The encoder class used to decode the given keys + + :cvar SIZE: The size that the private key is required to be + :cvar SEED_SIZE: The size that the seed used to generate the + private key is required to be + """ + + SIZE: ClassVar[int] = nacl.bindings.crypto_box_SECRETKEYBYTES + SEED_SIZE: ClassVar[int] = nacl.bindings.crypto_box_SEEDBYTES + + def __init__( + self, + private_key: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ): + # Decode the secret_key + private_key = encoder.decode(private_key) + # verify the given secret key type and size are correct + if not ( + isinstance(private_key, bytes) and len(private_key) == self.SIZE + ): + raise exc.TypeError( + ( + "PrivateKey must be created from a {} bytes long raw secret key" + ).format(self.SIZE) + ) + + raw_public_key = nacl.bindings.crypto_scalarmult_base(private_key) + + self._private_key = private_key + self.public_key = PublicKey(raw_public_key) + + @classmethod + def from_seed( + cls, + seed: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> "PrivateKey": + """ + Generate a PrivateKey using a deterministic construction + starting from a caller-provided seed + + .. warning:: The seed **must** be high-entropy; therefore, + its generator **must** be a cryptographic quality + random function like, for example, :func:`~nacl.utils.random`. + + .. warning:: The seed **must** be protected and remain secret. + Anyone who knows the seed is really in possession of + the corresponding PrivateKey. + + :param seed: The seed used to generate the private key + :rtype: :class:`~nacl.public.PrivateKey` + """ + # decode the seed + seed = encoder.decode(seed) + # Verify the given seed type and size are correct + if not (isinstance(seed, bytes) and len(seed) == cls.SEED_SIZE): + raise exc.TypeError( + ( + "PrivateKey seed must be a {} bytes long binary sequence" + ).format(cls.SEED_SIZE) + ) + # generate a raw key pair from the given seed + raw_pk, raw_sk = nacl.bindings.crypto_box_seed_keypair(seed) + # construct a instance from the raw secret key + return cls(raw_sk) + + def __bytes__(self) -> bytes: + return self._private_key + + def __hash__(self) -> int: + return hash((type(self), bytes(self.public_key))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return False + return self.public_key == other.public_key + + def __ne__(self, other: object) -> bool: + return not (self == other) + + @classmethod + def generate(cls) -> "PrivateKey": + """ + Generates a random :class:`~nacl.public.PrivateKey` object + + :rtype: :class:`~nacl.public.PrivateKey` + """ + return cls(random(PrivateKey.SIZE), encoder=encoding.RawEncoder) + + +_Box = TypeVar("_Box", bound="Box") + + +class Box(encoding.Encodable, StringFixer): + """ + The Box class boxes and unboxes messages between a pair of keys + + The ciphertexts generated by :class:`~nacl.public.Box` include a 16 + byte authenticator which is checked as part of the decryption. An invalid + authenticator will cause the decrypt function to raise an exception. The + authenticator is not a signature. Once you've decrypted the message you've + demonstrated the ability to create arbitrary valid message, so messages you + send are repudiable. For non-repudiable messages, sign them after + encryption. 
+ + :param private_key: :class:`~nacl.public.PrivateKey` used to encrypt and + decrypt messages + :param public_key: :class:`~nacl.public.PublicKey` used to encrypt and + decrypt messages + + :cvar NONCE_SIZE: The size that the nonce is required to be. + """ + + NONCE_SIZE: ClassVar[int] = nacl.bindings.crypto_box_NONCEBYTES + _shared_key: bytes + + def __init__(self, private_key: PrivateKey, public_key: PublicKey): + if not isinstance(private_key, PrivateKey) or not isinstance( + public_key, PublicKey + ): + raise exc.TypeError( + "Box must be created from a PrivateKey and a PublicKey" + ) + self._shared_key = nacl.bindings.crypto_box_beforenm( + public_key.encode(encoder=encoding.RawEncoder), + private_key.encode(encoder=encoding.RawEncoder), + ) + + def __bytes__(self) -> bytes: + return self._shared_key + + @classmethod + def decode( + cls: Type[_Box], encoded: bytes, encoder: Encoder = encoding.RawEncoder + ) -> _Box: + """ + Alternative constructor. Creates a Box from an existing Box's shared key. + """ + # Create an empty box + box: _Box = cls.__new__(cls) + + # Assign our decoded value to the shared key of the box + box._shared_key = encoder.decode(encoded) + + return box + + def encrypt( + self, + plaintext: bytes, + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> EncryptedMessage: + """ + Encrypts the plaintext message using the given `nonce` (or generates + one randomly if omitted) and returns the ciphertext encoded with the + encoder. + + .. warning:: It is **VITALLY** important that the nonce is a nonce, + i.e. it is a number used only once for any given key. If you fail + to do this, you compromise the privacy of the messages encrypted. + + :param plaintext: [:class:`bytes`] The plaintext message to encrypt + :param nonce: [:class:`bytes`] The nonce to use in the encryption + :param encoder: The encoder to use to encode the ciphertext + :rtype: [:class:`nacl.utils.EncryptedMessage`] + """ + if nonce is None: + nonce = random(self.NONCE_SIZE) + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE + ) + + ciphertext = nacl.bindings.crypto_box_easy_afternm( + plaintext, + nonce, + self._shared_key, + ) + + encoded_nonce = encoder.encode(nonce) + encoded_ciphertext = encoder.encode(ciphertext) + + return EncryptedMessage._from_parts( + encoded_nonce, + encoded_ciphertext, + encoder.encode(nonce + ciphertext), + ) + + def decrypt( + self, + ciphertext: bytes, + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Decrypts the ciphertext using the `nonce` (explicitly, when passed as a + parameter or implicitly, when omitted, as part of the ciphertext) and + returns the plaintext message. + + :param ciphertext: [:class:`bytes`] The encrypted message to decrypt + :param nonce: [:class:`bytes`] The nonce used when encrypting the + ciphertext + :param encoder: The encoder used to decode the ciphertext. + :rtype: [:class:`bytes`] + """ + # Decode our ciphertext + ciphertext = encoder.decode(ciphertext) + + if nonce is None: + # If we were given the nonce and ciphertext combined, split them. 
+ nonce = ciphertext[: self.NONCE_SIZE] + ciphertext = ciphertext[self.NONCE_SIZE :] + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE + ) + + plaintext = nacl.bindings.crypto_box_open_easy_afternm( + ciphertext, + nonce, + self._shared_key, + ) + + return plaintext + + def shared_key(self) -> bytes: + """ + Returns the Curve25519 shared secret, that can then be used as a key in + other symmetric ciphers. + + .. warning:: It is **VITALLY** important that you use a nonce with your + symmetric cipher. If you fail to do this, you compromise the + privacy of the messages encrypted. Ensure that the key length of + your cipher is 32 bytes. + :rtype: [:class:`bytes`] + """ + + return self._shared_key + + +_Key = TypeVar("_Key", PublicKey, PrivateKey) + + +class SealedBox(Generic[_Key], encoding.Encodable, StringFixer): + """ + The SealedBox class boxes and unboxes messages addressed to + a specified key-pair by using ephemeral sender's key pairs, + whose private part will be discarded just after encrypting + a single plaintext message. + + The ciphertexts generated by :class:`~nacl.public.SecretBox` include + the public part of the ephemeral key before the :class:`~nacl.public.Box` + ciphertext. + + :param recipient_key: a :class:`~nacl.public.PublicKey` used to encrypt + messages and derive nonces, or a :class:`~nacl.public.PrivateKey` used + to decrypt messages. + + .. versionadded:: 1.2 + """ + + _public_key: bytes + _private_key: Optional[bytes] + + def __init__(self, recipient_key: _Key): + if isinstance(recipient_key, PublicKey): + self._public_key = recipient_key.encode( + encoder=encoding.RawEncoder + ) + self._private_key = None + elif isinstance(recipient_key, PrivateKey): + self._private_key = recipient_key.encode( + encoder=encoding.RawEncoder + ) + self._public_key = recipient_key.public_key.encode( + encoder=encoding.RawEncoder + ) + else: + raise exc.TypeError( + "SealedBox must be created from a PublicKey or a PrivateKey" + ) + + def __bytes__(self) -> bytes: + return self._public_key + + def encrypt( + self, + plaintext: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Encrypts the plaintext message using a random-generated ephemeral + key pair and returns a "composed ciphertext", containing both + the public part of the key pair and the ciphertext proper, + encoded with the encoder. + + The private part of the ephemeral key-pair will be scrubbed before + returning the ciphertext, therefore, the sender will not be able to + decrypt the generated ciphertext. + + :param plaintext: [:class:`bytes`] The plaintext message to encrypt + :param encoder: The encoder to use to encode the ciphertext + :return bytes: encoded ciphertext + """ + + ciphertext = nacl.bindings.crypto_box_seal(plaintext, self._public_key) + + encoded_ciphertext = encoder.encode(ciphertext) + + return encoded_ciphertext + + def decrypt( + self: "SealedBox[PrivateKey]", + ciphertext: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Decrypts the ciphertext using the ephemeral public key enclosed + in the ciphertext and the SealedBox private key, returning + the plaintext message. + + :param ciphertext: [:class:`bytes`] The encrypted message to decrypt + :param encoder: The encoder used to decode the ciphertext. 
+ :return bytes: The original plaintext + :raises TypeError: if this SealedBox was created with a + :class:`~nacl.public.PublicKey` rather than a + :class:`~nacl.public.PrivateKey`. + """ + # Decode our ciphertext + ciphertext = encoder.decode(ciphertext) + + if self._private_key is None: + raise TypeError( + "SealedBoxes created with a public key cannot decrypt" + ) + plaintext = nacl.bindings.crypto_box_seal_open( + ciphertext, + self._public_key, + self._private_key, + ) + + return plaintext diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/__init__.py b/.venv/lib/python3.9/site-packages/nacl/pwhash/__init__.py new file mode 100644 index 0000000..ffd76a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/pwhash/__init__.py @@ -0,0 +1,75 @@ +# Copyright 2017 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl.exceptions import CryptPrefixError + +from . import _argon2, argon2i, argon2id, scrypt + +STRPREFIX = argon2id.STRPREFIX + +PWHASH_SIZE = argon2id.PWHASH_SIZE + +assert _argon2.ALG_ARGON2_DEFAULT == _argon2.ALG_ARGON2ID13 +# since version 1.0.15 of libsodium + +PASSWD_MIN = argon2id.PASSWD_MIN +PASSWD_MAX = argon2id.PASSWD_MAX +MEMLIMIT_MAX = argon2id.MEMLIMIT_MAX +MEMLIMIT_MIN = argon2id.MEMLIMIT_MIN +OPSLIMIT_MAX = argon2id.OPSLIMIT_MAX +OPSLIMIT_MIN = argon2id.OPSLIMIT_MIN +OPSLIMIT_INTERACTIVE = argon2id.OPSLIMIT_INTERACTIVE +MEMLIMIT_INTERACTIVE = argon2id.MEMLIMIT_INTERACTIVE +OPSLIMIT_MODERATE = argon2id.OPSLIMIT_MODERATE +MEMLIMIT_MODERATE = argon2id.MEMLIMIT_MODERATE +OPSLIMIT_SENSITIVE = argon2id.OPSLIMIT_SENSITIVE +MEMLIMIT_SENSITIVE = argon2id.MEMLIMIT_SENSITIVE + +str = argon2id.str + +assert argon2i.ALG != argon2id.ALG + +SCRYPT_SALTBYTES = scrypt.SALTBYTES +SCRYPT_PWHASH_SIZE = scrypt.PWHASH_SIZE +SCRYPT_OPSLIMIT_INTERACTIVE = scrypt.OPSLIMIT_INTERACTIVE +SCRYPT_MEMLIMIT_INTERACTIVE = scrypt.MEMLIMIT_INTERACTIVE +SCRYPT_OPSLIMIT_SENSITIVE = scrypt.OPSLIMIT_SENSITIVE +SCRYPT_MEMLIMIT_SENSITIVE = scrypt.MEMLIMIT_SENSITIVE + + +kdf_scryptsalsa208sha256 = scrypt.kdf +scryptsalsa208sha256_str = scrypt.str +verify_scryptsalsa208sha256 = scrypt.verify + + +def verify(password_hash: bytes, password: bytes) -> bool: + """ + Takes a modular crypt encoded stored password hash derived using one + of the algorithms supported by `libsodium` and checks if the user provided + password will hash to the same string when using the parameters saved + in the stored hash + """ + if password_hash.startswith(argon2id.STRPREFIX): + return argon2id.verify(password_hash, password) + elif password_hash.startswith(argon2i.STRPREFIX): + return argon2id.verify(password_hash, password) + elif scrypt.AVAILABLE and password_hash.startswith(scrypt.STRPREFIX): + return scrypt.verify(password_hash, password) + else: + raise ( + CryptPrefixError( + "given password_hash is not in a supported format" + ) + ) diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/__init__.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e00bdcc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/_argon2.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/_argon2.cpython-39.pyc new file mode 100644 index 0000000..5002594 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/_argon2.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/argon2i.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/argon2i.cpython-39.pyc new file mode 100644 index 0000000..1bd3499 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/argon2i.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/argon2id.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/argon2id.cpython-39.pyc new file mode 100644 index 0000000..d938bcb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/argon2id.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/scrypt.cpython-39.pyc b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/scrypt.cpython-39.pyc new file mode 100644 index 0000000..47cb802 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/nacl/pwhash/__pycache__/scrypt.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/_argon2.py b/.venv/lib/python3.9/site-packages/nacl/pwhash/_argon2.py new file mode 100644 index 0000000..856eda0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/pwhash/_argon2.py @@ -0,0 +1,49 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
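As an illustrative sketch of the password-hashing interfaces added by the nacl.pwhash package above (assuming the default argon2id construct is available in the bundled libsodium; this snippet is not part of the vendored sources):

from nacl import pwhash, utils

# Produce a modular-crypt style hash for storage, then verify a login attempt against it.
stored = pwhash.str(b"correct horse battery staple")
assert pwhash.verify(stored, b"correct horse battery staple")

# Derive a 32-byte key with the interactive cost settings and a random salt.
salt = utils.random(pwhash.argon2id.SALTBYTES)
key = pwhash.argon2id.kdf(
    32,
    b"correct horse battery staple",
    salt,
    opslimit=pwhash.argon2id.OPSLIMIT_INTERACTIVE,
    memlimit=pwhash.argon2id.MEMLIMIT_INTERACTIVE,
)
assert len(key) == 32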
+ +import nacl.bindings + +_argon2_strbytes_plus_one = nacl.bindings.crypto_pwhash_STRBYTES + +PWHASH_SIZE = _argon2_strbytes_plus_one - 1 +SALTBYTES = nacl.bindings.crypto_pwhash_SALTBYTES + +PASSWD_MIN = nacl.bindings.crypto_pwhash_PASSWD_MIN +PASSWD_MAX = nacl.bindings.crypto_pwhash_PASSWD_MAX + +PWHASH_SIZE = _argon2_strbytes_plus_one - 1 + +BYTES_MAX = nacl.bindings.crypto_pwhash_BYTES_MAX +BYTES_MIN = nacl.bindings.crypto_pwhash_BYTES_MIN + +ALG_ARGON2I13 = nacl.bindings.crypto_pwhash_ALG_ARGON2I13 +ALG_ARGON2ID13 = nacl.bindings.crypto_pwhash_ALG_ARGON2ID13 +ALG_ARGON2_DEFAULT = nacl.bindings.crypto_pwhash_ALG_DEFAULT + + +def verify(password_hash: bytes, password: bytes) -> bool: + """ + Takes a modular crypt encoded argon2i or argon2id stored password hash + and checks if the user provided password will hash to the same string + when using the stored parameters + + :param password_hash: password hash serialized in modular crypt() format + :type password_hash: bytes + :param password: user provided password + :type password: bytes + :rtype: boolean + + .. versionadded:: 1.2 + """ + return nacl.bindings.crypto_pwhash_str_verify(password_hash, password) diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/argon2i.py b/.venv/lib/python3.9/site-packages/nacl/pwhash/argon2i.py new file mode 100644 index 0000000..f9b3af7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/pwhash/argon2i.py @@ -0,0 +1,132 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import nacl.bindings +import nacl.encoding + +from . import _argon2 + +ALG = _argon2.ALG_ARGON2I13 +STRPREFIX = nacl.bindings.crypto_pwhash_argon2i_STRPREFIX + +SALTBYTES = _argon2.SALTBYTES + +PASSWD_MIN = _argon2.PASSWD_MIN +PASSWD_MAX = _argon2.PASSWD_MAX + +PWHASH_SIZE = _argon2.PWHASH_SIZE + +BYTES_MIN = _argon2.BYTES_MIN +BYTES_MAX = _argon2.BYTES_MAX + +verify = _argon2.verify + +MEMLIMIT_MAX = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_MAX +MEMLIMIT_MIN = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_MIN +OPSLIMIT_MAX = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_MAX +OPSLIMIT_MIN = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_MIN + +OPSLIMIT_INTERACTIVE = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE +MEMLIMIT_INTERACTIVE = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE +OPSLIMIT_SENSITIVE = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE +MEMLIMIT_SENSITIVE = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE + +OPSLIMIT_MODERATE = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_MODERATE +MEMLIMIT_MODERATE = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_MODERATE + + +def kdf( + size: int, + password: bytes, + salt: bytes, + opslimit: int = OPSLIMIT_SENSITIVE, + memlimit: int = MEMLIMIT_SENSITIVE, + encoder: nacl.encoding.Encoder = nacl.encoding.RawEncoder, +) -> bytes: + """ + Derive a ``size`` bytes long key from a caller-supplied + ``password`` and ``salt`` pair using the argon2i + memory-hard construct. 
+ + the enclosing module provides the constants + + - :py:const:`.OPSLIMIT_INTERACTIVE` + - :py:const:`.MEMLIMIT_INTERACTIVE` + - :py:const:`.OPSLIMIT_MODERATE` + - :py:const:`.MEMLIMIT_MODERATE` + - :py:const:`.OPSLIMIT_SENSITIVE` + - :py:const:`.MEMLIMIT_SENSITIVE` + + as a guidance for correct settings. + + :param size: derived key size, must be between + :py:const:`.BYTES_MIN` and + :py:const:`.BYTES_MAX` + :type size: int + :param password: password used to seed the key derivation procedure; + it length must be between + :py:const:`.PASSWD_MIN` and + :py:const:`.PASSWD_MAX` + :type password: bytes + :param salt: **RANDOM** salt used in the key derivation procedure; + its length must be exactly :py:const:`.SALTBYTES` + :type salt: bytes + :param opslimit: the time component (operation count) + of the key derivation procedure's computational cost; + it must be between + :py:const:`.OPSLIMIT_MIN` and + :py:const:`.OPSLIMIT_MAX` + :type opslimit: int + :param memlimit: the memory occupation component + of the key derivation procedure's computational cost; + it must be between + :py:const:`.MEMLIMIT_MIN` and + :py:const:`.MEMLIMIT_MAX` + :type memlimit: int + :rtype: bytes + + .. versionadded:: 1.2 + """ + + return encoder.encode( + nacl.bindings.crypto_pwhash_alg( + size, password, salt, opslimit, memlimit, ALG + ) + ) + + +def str( + password: bytes, + opslimit: int = OPSLIMIT_INTERACTIVE, + memlimit: int = MEMLIMIT_INTERACTIVE, +) -> bytes: + """ + Hashes a password with a random salt, using the memory-hard + argon2i construct and returning an ascii string that has all + the needed info to check against a future password + + + The default settings for opslimit and memlimit are those deemed + correct for the interactive user login case. + + :param bytes password: + :param int opslimit: + :param int memlimit: + :rtype: bytes + + .. versionadded:: 1.2 + """ + return nacl.bindings.crypto_pwhash_str_alg( + password, opslimit, memlimit, ALG + ) diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/argon2id.py b/.venv/lib/python3.9/site-packages/nacl/pwhash/argon2id.py new file mode 100644 index 0000000..f3aa3f7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/pwhash/argon2id.py @@ -0,0 +1,135 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import nacl.bindings +import nacl.encoding + +from . 
import _argon2 + +ALG = _argon2.ALG_ARGON2ID13 +STRPREFIX = nacl.bindings.crypto_pwhash_argon2id_STRPREFIX + +SALTBYTES = _argon2.SALTBYTES + +PASSWD_MIN = _argon2.PASSWD_MIN +PASSWD_MAX = _argon2.PASSWD_MAX + +PWHASH_SIZE = _argon2.PWHASH_SIZE + +BYTES_MIN = _argon2.BYTES_MIN +BYTES_MAX = _argon2.BYTES_MAX + +verify = _argon2.verify + +MEMLIMIT_MIN = nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_MIN +MEMLIMIT_MAX = nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_MAX +OPSLIMIT_MIN = nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_MIN +OPSLIMIT_MAX = nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_MAX + +OPSLIMIT_INTERACTIVE = ( + nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE +) +MEMLIMIT_INTERACTIVE = ( + nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE +) +OPSLIMIT_SENSITIVE = nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE +MEMLIMIT_SENSITIVE = nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE + +OPSLIMIT_MODERATE = nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_MODERATE +MEMLIMIT_MODERATE = nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_MODERATE + + +def kdf( + size: int, + password: bytes, + salt: bytes, + opslimit: int = OPSLIMIT_SENSITIVE, + memlimit: int = MEMLIMIT_SENSITIVE, + encoder: nacl.encoding.Encoder = nacl.encoding.RawEncoder, +) -> bytes: + """ + Derive a ``size`` bytes long key from a caller-supplied + ``password`` and ``salt`` pair using the argon2id + memory-hard construct. + + the enclosing module provides the constants + + - :py:const:`.OPSLIMIT_INTERACTIVE` + - :py:const:`.MEMLIMIT_INTERACTIVE` + - :py:const:`.OPSLIMIT_MODERATE` + - :py:const:`.MEMLIMIT_MODERATE` + - :py:const:`.OPSLIMIT_SENSITIVE` + - :py:const:`.MEMLIMIT_SENSITIVE` + + as a guidance for correct settings. + + :param size: derived key size, must be between + :py:const:`.BYTES_MIN` and + :py:const:`.BYTES_MAX` + :type size: int + :param password: password used to seed the key derivation procedure; + it length must be between + :py:const:`.PASSWD_MIN` and + :py:const:`.PASSWD_MAX` + :type password: bytes + :param salt: **RANDOM** salt used in the key derivation procedure; + its length must be exactly :py:const:`.SALTBYTES` + :type salt: bytes + :param opslimit: the time component (operation count) + of the key derivation procedure's computational cost; + it must be between + :py:const:`.OPSLIMIT_MIN` and + :py:const:`.OPSLIMIT_MAX` + :type opslimit: int + :param memlimit: the memory occupation component + of the key derivation procedure's computational cost; + it must be between + :py:const:`.MEMLIMIT_MIN` and + :py:const:`.MEMLIMIT_MAX` + :type memlimit: int + :rtype: bytes + + .. versionadded:: 1.2 + """ + + return encoder.encode( + nacl.bindings.crypto_pwhash_alg( + size, password, salt, opslimit, memlimit, ALG + ) + ) + + +def str( + password: bytes, + opslimit: int = OPSLIMIT_INTERACTIVE, + memlimit: int = MEMLIMIT_INTERACTIVE, +) -> bytes: + """ + Hashes a password with a random salt, using the memory-hard + argon2id construct and returning an ascii string that has all + the needed info to check against a future password + + The default settings for opslimit and memlimit are those deemed + correct for the interactive user login case. + + :param bytes password: + :param int opslimit: + :param int memlimit: + :rtype: bytes + + .. 
versionadded:: 1.2 + """ + return nacl.bindings.crypto_pwhash_str_alg( + password, opslimit, memlimit, ALG + ) diff --git a/.venv/lib/python3.9/site-packages/nacl/pwhash/scrypt.py b/.venv/lib/python3.9/site-packages/nacl/pwhash/scrypt.py new file mode 100644 index 0000000..b9fc9d8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/pwhash/scrypt.py @@ -0,0 +1,211 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import cast + +import nacl.bindings +import nacl.encoding +from nacl import exceptions as exc +from nacl.exceptions import ensure + +_strbytes_plus_one = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_STRBYTES + +AVAILABLE = nacl.bindings.has_crypto_pwhash_scryptsalsa208sha256 + +STRPREFIX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_STRPREFIX + +SALTBYTES = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_SALTBYTES + +PASSWD_MIN = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN +PASSWD_MAX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX + +PWHASH_SIZE = _strbytes_plus_one - 1 + +BYTES_MIN = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_BYTES_MIN +BYTES_MAX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_BYTES_MAX + +MEMLIMIT_MIN = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN +MEMLIMIT_MAX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX +OPSLIMIT_MIN = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN +OPSLIMIT_MAX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX + +OPSLIMIT_INTERACTIVE = ( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE +) +MEMLIMIT_INTERACTIVE = ( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE +) +OPSLIMIT_SENSITIVE = ( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE +) +MEMLIMIT_SENSITIVE = ( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE +) + +OPSLIMIT_MODERATE = 8 * OPSLIMIT_INTERACTIVE +MEMLIMIT_MODERATE = 8 * MEMLIMIT_INTERACTIVE + + +def kdf( + size: int, + password: bytes, + salt: bytes, + opslimit: int = OPSLIMIT_SENSITIVE, + memlimit: int = MEMLIMIT_SENSITIVE, + encoder: nacl.encoding.Encoder = nacl.encoding.RawEncoder, +) -> bytes: + """ + Derive a ``size`` bytes long key from a caller-supplied + ``password`` and ``salt`` pair using the scryptsalsa208sha256 + memory-hard construct. + + + the enclosing module provides the constants + + - :py:const:`.OPSLIMIT_INTERACTIVE` + - :py:const:`.MEMLIMIT_INTERACTIVE` + - :py:const:`.OPSLIMIT_SENSITIVE` + - :py:const:`.MEMLIMIT_SENSITIVE` + - :py:const:`.OPSLIMIT_MODERATE` + - :py:const:`.MEMLIMIT_MODERATE` + + as a guidance for correct settings respectively for the + interactive login and the long term key protecting sensitive data + use cases. 
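As a minimal usage sketch (illustrative only, not part of the vendored module), key derivation with the interactive limits looks like the following; the 32-byte output size and the example password are arbitrary choices, and the same call shape applies to the argon2i/argon2id modules above:

```
import nacl.pwhash
import nacl.utils

password = b"correct horse battery staple"

# A fresh random salt must be generated per password and stored alongside
# the derived key so the derivation can be repeated later.
salt = nacl.utils.random(nacl.pwhash.scrypt.SALTBYTES)

key = nacl.pwhash.scrypt.kdf(
    32,                 # derived key size in bytes
    password,
    salt,
    opslimit=nacl.pwhash.scrypt.OPSLIMIT_INTERACTIVE,
    memlimit=nacl.pwhash.scrypt.MEMLIMIT_INTERACTIVE,
)
```

Note that the scrypt module raises ``nacl.exceptions.UnavailableError`` on minimal libsodium builds, as documented below.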
+ + :param size: derived key size, must be between + :py:const:`.BYTES_MIN` and + :py:const:`.BYTES_MAX` + :type size: int + :param password: password used to seed the key derivation procedure; + it length must be between + :py:const:`.PASSWD_MIN` and + :py:const:`.PASSWD_MAX` + :type password: bytes + :param salt: **RANDOM** salt used in the key derivation procedure; + its length must be exactly :py:const:`.SALTBYTES` + :type salt: bytes + :param opslimit: the time component (operation count) + of the key derivation procedure's computational cost; + it must be between + :py:const:`.OPSLIMIT_MIN` and + :py:const:`.OPSLIMIT_MAX` + :type opslimit: int + :param memlimit: the memory occupation component + of the key derivation procedure's computational cost; + it must be between + :py:const:`.MEMLIMIT_MIN` and + :py:const:`.MEMLIMIT_MAX` + :type memlimit: int + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + .. versionadded:: 1.2 + """ + ensure( + AVAILABLE, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + len(salt) == SALTBYTES, + "The salt must be exactly %s, not %s bytes long" + % (SALTBYTES, len(salt)), + raising=exc.ValueError, + ) + + n_log2, r, p = nacl.bindings.nacl_bindings_pick_scrypt_params( + opslimit, memlimit + ) + maxmem = memlimit + (2**16) + + return encoder.encode( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_ll( + password, + salt, + # Cast safety: n_log2 is a positive integer, and so 2 ** n_log2 is also + # a positive integer. Mypy+typeshed can't deduce this, because there's no + # way to for them to know that n_log2: int is positive. + cast(int, 2**n_log2), + r, + p, + maxmem=maxmem, + dklen=size, + ) + ) + + +def str( + password: bytes, + opslimit: int = OPSLIMIT_INTERACTIVE, + memlimit: int = MEMLIMIT_INTERACTIVE, +) -> bytes: + """ + Hashes a password with a random salt, using the memory-hard + scryptsalsa208sha256 construct and returning an ascii string + that has all the needed info to check against a future password + + The default settings for opslimit and memlimit are those deemed + correct for the interactive user login case. + + :param bytes password: + :param int opslimit: + :param int memlimit: + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + .. versionadded:: 1.2 + """ + ensure( + AVAILABLE, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + return nacl.bindings.crypto_pwhash_scryptsalsa208sha256_str( + password, opslimit, memlimit + ) + + +def verify(password_hash: bytes, password: bytes) -> bool: + """ + Takes the output of scryptsalsa208sha256 and compares it against + a user provided password to see if they are the same + + :param password_hash: bytes + :param password: bytes + :rtype: boolean + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + .. 
versionadded:: 1.2 + """ + ensure( + AVAILABLE, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + len(password_hash) == PWHASH_SIZE, + "The password hash must be exactly %s bytes long" + % nacl.bindings.crypto_pwhash_scryptsalsa208sha256_STRBYTES, + raising=exc.ValueError, + ) + + return nacl.bindings.crypto_pwhash_scryptsalsa208sha256_str_verify( + password_hash, password + ) diff --git a/.venv/lib/python3.9/site-packages/nacl/py.typed b/.venv/lib/python3.9/site-packages/nacl/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/nacl/secret.py b/.venv/lib/python3.9/site-packages/nacl/secret.py new file mode 100644 index 0000000..5c3064f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/secret.py @@ -0,0 +1,305 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import ClassVar, Optional + +import nacl.bindings +from nacl import encoding +from nacl import exceptions as exc +from nacl.utils import EncryptedMessage, StringFixer, random + + +class SecretBox(encoding.Encodable, StringFixer): + """ + The SecretBox class encrypts and decrypts messages using the given secret + key. + + The ciphertexts generated by :class:`~nacl.secret.Secretbox` include a 16 + byte authenticator which is checked as part of the decryption. An invalid + authenticator will cause the decrypt function to raise an exception. The + authenticator is not a signature. Once you've decrypted the message you've + demonstrated the ability to create arbitrary valid message, so messages you + send are repudiable. For non-repudiable messages, sign them after + encryption. + + Encryption is done using `XSalsa20-Poly1305`_, and there are no practical + limits on the number or size of messages (up to 2⁶⁴ messages, each up to 2⁶⁴ + bytes). + + .. _XSalsa20-Poly1305: https://doc.libsodium.org/secret-key_cryptography/secretbox#algorithm-details + + :param key: The secret key used to encrypt and decrypt messages + :param encoder: The encoder class used to decode the given key + + :cvar KEY_SIZE: The size that the key is required to be. + :cvar NONCE_SIZE: The size that the nonce is required to be. + :cvar MACBYTES: The size of the authentication MAC tag in bytes. + :cvar MESSAGEBYTES_MAX: The maximum size of a message which can be + safely encrypted with a single key/nonce + pair. 
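A minimal round-trip sketch of the class described above (illustrative only; the key comes from ``nacl.utils.random`` and the plaintext is an arbitrary example):

```
import nacl.secret
import nacl.utils

# The key must be exactly SecretBox.KEY_SIZE (32) random bytes.
key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)
box = nacl.secret.SecretBox(key)

# encrypt() generates a random nonce when none is supplied and returns the
# nonce and ciphertext combined in an EncryptedMessage.
encrypted = box.encrypt(b"The quick brown fox")

# decrypt() splits the nonce back off the combined ciphertext automatically.
assert box.decrypt(encrypted) == b"The quick brown fox"
```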
+ """ + + KEY_SIZE: ClassVar[int] = nacl.bindings.crypto_secretbox_KEYBYTES + NONCE_SIZE: ClassVar[int] = nacl.bindings.crypto_secretbox_NONCEBYTES + MACBYTES: ClassVar[int] = nacl.bindings.crypto_secretbox_MACBYTES + MESSAGEBYTES_MAX: ClassVar[int] = ( + nacl.bindings.crypto_secretbox_MESSAGEBYTES_MAX + ) + + def __init__( + self, key: bytes, encoder: encoding.Encoder = encoding.RawEncoder + ): + key = encoder.decode(key) + if not isinstance(key, bytes): + raise exc.TypeError("SecretBox must be created from 32 bytes") + + if len(key) != self.KEY_SIZE: + raise exc.ValueError( + "The key must be exactly %s bytes long" % self.KEY_SIZE, + ) + + self._key = key + + def __bytes__(self) -> bytes: + return self._key + + def encrypt( + self, + plaintext: bytes, + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> EncryptedMessage: + """ + Encrypts the plaintext message using the given `nonce` (or generates + one randomly if omitted) and returns the ciphertext encoded with the + encoder. + + .. warning:: It is **VITALLY** important that the nonce is a nonce, + i.e. it is a number used only once for any given key. If you fail + to do this, you compromise the privacy of the messages encrypted. + Give your nonces a different prefix, or have one side use an odd + counter and one an even counter. Just make sure they are different. + + :param plaintext: [:class:`bytes`] The plaintext message to encrypt + :param nonce: [:class:`bytes`] The nonce to use in the encryption + :param encoder: The encoder to use to encode the ciphertext + :rtype: [:class:`nacl.utils.EncryptedMessage`] + """ + if nonce is None: + nonce = random(self.NONCE_SIZE) + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, + ) + + ciphertext = nacl.bindings.crypto_secretbox_easy( + plaintext, nonce, self._key + ) + + encoded_nonce = encoder.encode(nonce) + encoded_ciphertext = encoder.encode(ciphertext) + + return EncryptedMessage._from_parts( + encoded_nonce, + encoded_ciphertext, + encoder.encode(nonce + ciphertext), + ) + + def decrypt( + self, + ciphertext: bytes, + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Decrypts the ciphertext using the `nonce` (explicitly, when passed as a + parameter or implicitly, when omitted, as part of the ciphertext) and + returns the plaintext message. + + :param ciphertext: [:class:`bytes`] The encrypted message to decrypt + :param nonce: [:class:`bytes`] The nonce used when encrypting the + ciphertext + :param encoder: The encoder used to decode the ciphertext. + :rtype: [:class:`bytes`] + """ + # Decode our ciphertext + ciphertext = encoder.decode(ciphertext) + + if nonce is None: + # If we were given the nonce and ciphertext combined, split them. + nonce = ciphertext[: self.NONCE_SIZE] + ciphertext = ciphertext[self.NONCE_SIZE :] + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, + ) + + plaintext = nacl.bindings.crypto_secretbox_open_easy( + ciphertext, nonce, self._key + ) + + return plaintext + + +class Aead(encoding.Encodable, StringFixer): + """ + The AEAD class encrypts and decrypts messages using the given secret key. + + Unlike :class:`~nacl.secret.SecretBox`, AEAD supports authenticating + non-confidential data received alongside the message, such as a length + or type tag. 
+ + Like :class:`~nacl.secret.Secretbox`, this class provides authenticated + encryption. An inauthentic message will cause the decrypt function to raise + an exception. + + Likewise, the authenticator should not be mistaken for a (public-key) + signature: recipients (with the ability to decrypt messages) are capable of + creating arbitrary valid message; in particular, this means AEAD messages + are repudiable. For non-repudiable messages, sign them after encryption. + + The cryptosystem used is `XChacha20-Poly1305`_ as specified for + `standardization`_. There are `no practical limits`_ to how much can safely + be encrypted under a given key (up to 2⁶⁴ messages each containing up + to 2⁶⁴ bytes). + + .. _standardization: https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-xchacha + .. _XChacha20-Poly1305: https://doc.libsodium.org/secret-key_cryptography/aead#xchacha-20-poly1305 + .. _no practical limits: https://doc.libsodium.org/secret-key_cryptography/aead#limitations + + :param key: The secret key used to encrypt and decrypt messages + :param encoder: The encoder class used to decode the given key + + :cvar KEY_SIZE: The size that the key is required to be. + :cvar NONCE_SIZE: The size that the nonce is required to be. + :cvar MACBYTES: The size of the authentication MAC tag in bytes. + :cvar MESSAGEBYTES_MAX: The maximum size of a message which can be + safely encrypted with a single key/nonce + pair. + """ + + KEY_SIZE = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_KEYBYTES + NONCE_SIZE = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_NPUBBYTES + MACBYTES = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_ABYTES + MESSAGEBYTES_MAX = ( + nacl.bindings.crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX + ) + + def __init__( + self, + key: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ): + key = encoder.decode(key) + if not isinstance(key, bytes): + raise exc.TypeError("AEAD must be created from 32 bytes") + + if len(key) != self.KEY_SIZE: + raise exc.ValueError( + "The key must be exactly %s bytes long" % self.KEY_SIZE, + ) + + self._key = key + + def __bytes__(self) -> bytes: + return self._key + + def encrypt( + self, + plaintext: bytes, + aad: bytes = b"", + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> EncryptedMessage: + """ + Encrypts the plaintext message using the given `nonce` (or generates + one randomly if omitted) and returns the ciphertext encoded with the + encoder. + + .. warning:: It is vitally important for :param nonce: to be unique. + By default, it is generated randomly; [:class:`Aead`] uses XChacha20 + for extended (192b) nonce size, so the risk of reusing random nonces + is negligible. It is *strongly recommended* to keep this behaviour, + as nonce reuse will compromise the privacy of encrypted messages. + Should implicit nonces be inadequate for your application, the + second best option is using split counters; e.g. if sending messages + encrypted under a shared key between 2 users, each user can use the + number of messages it sent so far, prefixed or suffixed with a 1bit + user id. Note that the counter must **never** be rolled back (due + to overflow, on-disk state being rolled back to an earlier backup, + ...) 
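A short sketch of the default random-nonce path together with additional authenticated data (illustrative only; the key, plaintext, and ``aad`` value are arbitrary examples):

```
import nacl.secret
import nacl.utils

key = nacl.utils.random(nacl.secret.Aead.KEY_SIZE)
box = nacl.secret.Aead(key)

# The aad is authenticated but not encrypted; the identical value must be
# supplied again on decryption or verification fails.
aad = b"msg-type=greeting"
encrypted = box.encrypt(b"hello", aad=aad)  # random 192-bit nonce chosen here

assert box.decrypt(encrypted, aad=aad) == b"hello"
```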
+ + :param plaintext: [:class:`bytes`] The plaintext message to encrypt + :param nonce: [:class:`bytes`] The nonce to use in the encryption + :param encoder: The encoder to use to encode the ciphertext + :rtype: [:class:`nacl.utils.EncryptedMessage`] + """ + if nonce is None: + nonce = random(self.NONCE_SIZE) + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, + ) + + ciphertext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_encrypt( + plaintext, aad, nonce, self._key + ) + + encoded_nonce = encoder.encode(nonce) + encoded_ciphertext = encoder.encode(ciphertext) + + return EncryptedMessage._from_parts( + encoded_nonce, + encoded_ciphertext, + encoder.encode(nonce + ciphertext), + ) + + def decrypt( + self, + ciphertext: bytes, + aad: bytes = b"", + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Decrypts the ciphertext using the `nonce` (explicitly, when passed as a + parameter or implicitly, when omitted, as part of the ciphertext) and + returns the plaintext message. + + :param ciphertext: [:class:`bytes`] The encrypted message to decrypt + :param nonce: [:class:`bytes`] The nonce used when encrypting the + ciphertext + :param encoder: The encoder used to decode the ciphertext. + :rtype: [:class:`bytes`] + """ + # Decode our ciphertext + ciphertext = encoder.decode(ciphertext) + + if nonce is None: + # If we were given the nonce and ciphertext combined, split them. + nonce = ciphertext[: self.NONCE_SIZE] + ciphertext = ciphertext[self.NONCE_SIZE :] + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, + ) + + plaintext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_decrypt( + ciphertext, aad, nonce, self._key + ) + + return plaintext diff --git a/.venv/lib/python3.9/site-packages/nacl/signing.py b/.venv/lib/python3.9/site-packages/nacl/signing.py new file mode 100644 index 0000000..536b369 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/signing.py @@ -0,0 +1,250 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional + +import nacl.bindings +from nacl import encoding +from nacl import exceptions as exc +from nacl.public import ( + PrivateKey as _Curve25519_PrivateKey, + PublicKey as _Curve25519_PublicKey, +) +from nacl.utils import StringFixer, random + + +class SignedMessage(bytes): + """ + A bytes subclass that holds a message that has been signed by a + :class:`SigningKey`. + """ + + _signature: bytes + _message: bytes + + @classmethod + def _from_parts( + cls, signature: bytes, message: bytes, combined: bytes + ) -> "SignedMessage": + obj = cls(combined) + obj._signature = signature + obj._message = message + return obj + + @property + def signature(self) -> bytes: + """ + The signature contained within the :class:`SignedMessage`. 
+ """ + return self._signature + + @property + def message(self) -> bytes: + """ + The message contained within the :class:`SignedMessage`. + """ + return self._message + + +class VerifyKey(encoding.Encodable, StringFixer): + """ + The public key counterpart to an Ed25519 SigningKey for producing digital + signatures. + + :param key: [:class:`bytes`] Serialized Ed25519 public key + :param encoder: A class that is able to decode the `key` + """ + + def __init__( + self, key: bytes, encoder: encoding.Encoder = encoding.RawEncoder + ): + # Decode the key + key = encoder.decode(key) + if not isinstance(key, bytes): + raise exc.TypeError("VerifyKey must be created from 32 bytes") + + if len(key) != nacl.bindings.crypto_sign_PUBLICKEYBYTES: + raise exc.ValueError( + "The key must be exactly %s bytes long" + % nacl.bindings.crypto_sign_PUBLICKEYBYTES, + ) + + self._key = key + + def __bytes__(self) -> bytes: + return self._key + + def __hash__(self) -> int: + return hash(bytes(self)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return False + return nacl.bindings.sodium_memcmp(bytes(self), bytes(other)) + + def __ne__(self, other: object) -> bool: + return not (self == other) + + def verify( + self, + smessage: bytes, + signature: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Verifies the signature of a signed message, returning the message + if it has not been tampered with else raising + :class:`~nacl.exceptions.BadSignatureError`. + + :param smessage: [:class:`bytes`] Either the original messaged or a + signature and message concated together. + :param signature: [:class:`bytes`] If an unsigned message is given for + smessage then the detached signature must be provided. + :param encoder: A class that is able to decode the secret message and + signature. + :rtype: :class:`bytes` + """ + if signature is not None: + # If we were given the message and signature separately, validate + # signature size and combine them. + if not isinstance(signature, bytes): + raise exc.TypeError( + "Verification signature must be created from %d bytes" + % nacl.bindings.crypto_sign_BYTES, + ) + + if len(signature) != nacl.bindings.crypto_sign_BYTES: + raise exc.ValueError( + "The signature must be exactly %d bytes long" + % nacl.bindings.crypto_sign_BYTES, + ) + + smessage = signature + encoder.decode(smessage) + else: + # Decode the signed message + smessage = encoder.decode(smessage) + + return nacl.bindings.crypto_sign_open(smessage, self._key) + + def to_curve25519_public_key(self) -> _Curve25519_PublicKey: + """ + Converts a :class:`~nacl.signing.VerifyKey` to a + :class:`~nacl.public.PublicKey` + + :rtype: :class:`~nacl.public.PublicKey` + """ + raw_pk = nacl.bindings.crypto_sign_ed25519_pk_to_curve25519(self._key) + return _Curve25519_PublicKey(raw_pk) + + +class SigningKey(encoding.Encodable, StringFixer): + """ + Private key for producing digital signatures using the Ed25519 algorithm. + + Signing keys are produced from a 32-byte (256-bit) random seed value. This + value can be passed into the :class:`~nacl.signing.SigningKey` as a + :func:`bytes` whose length is 32. + + .. warning:: This **must** be protected and remain secret. Anyone who knows + the value of your :class:`~nacl.signing.SigningKey` or it's seed can + masquerade as you. + + :param seed: [:class:`bytes`] Random 32-byte value (i.e. 
private key) + :param encoder: A class that is able to decode the seed + + :ivar: verify_key: [:class:`~nacl.signing.VerifyKey`] The verify + (i.e. public) key that corresponds with this signing key. + """ + + def __init__( + self, + seed: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ): + # Decode the seed + seed = encoder.decode(seed) + if not isinstance(seed, bytes): + raise exc.TypeError( + "SigningKey must be created from a 32 byte seed" + ) + + # Verify that our seed is the proper size + if len(seed) != nacl.bindings.crypto_sign_SEEDBYTES: + raise exc.ValueError( + "The seed must be exactly %d bytes long" + % nacl.bindings.crypto_sign_SEEDBYTES + ) + + public_key, secret_key = nacl.bindings.crypto_sign_seed_keypair(seed) + + self._seed = seed + self._signing_key = secret_key + self.verify_key = VerifyKey(public_key) + + def __bytes__(self) -> bytes: + return self._seed + + def __hash__(self) -> int: + return hash(bytes(self)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return False + return nacl.bindings.sodium_memcmp(bytes(self), bytes(other)) + + def __ne__(self, other: object) -> bool: + return not (self == other) + + @classmethod + def generate(cls) -> "SigningKey": + """ + Generates a random :class:`~nacl.signing.SigningKey` object. + + :rtype: :class:`~nacl.signing.SigningKey` + """ + return cls( + random(nacl.bindings.crypto_sign_SEEDBYTES), + encoder=encoding.RawEncoder, + ) + + def sign( + self, + message: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> SignedMessage: + """ + Sign a message using this key. + + :param message: [:class:`bytes`] The data to be signed. + :param encoder: A class that is used to encode the signed message. + :rtype: :class:`~nacl.signing.SignedMessage` + """ + raw_signed = nacl.bindings.crypto_sign(message, self._signing_key) + + crypto_sign_BYTES = nacl.bindings.crypto_sign_BYTES + signature = encoder.encode(raw_signed[:crypto_sign_BYTES]) + message = encoder.encode(raw_signed[crypto_sign_BYTES:]) + signed = encoder.encode(raw_signed) + + return SignedMessage._from_parts(signature, message, signed) + + def to_curve25519_private_key(self) -> _Curve25519_PrivateKey: + """ + Converts a :class:`~nacl.signing.SigningKey` to a + :class:`~nacl.public.PrivateKey` + + :rtype: :class:`~nacl.public.PrivateKey` + """ + sk = self._signing_key + raw_private = nacl.bindings.crypto_sign_ed25519_sk_to_curve25519(sk) + return _Curve25519_PrivateKey(raw_private) diff --git a/.venv/lib/python3.9/site-packages/nacl/utils.py b/.venv/lib/python3.9/site-packages/nacl/utils.py new file mode 100644 index 0000000..d19d236 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/nacl/utils.py @@ -0,0 +1,88 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
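A brief usage sketch of the signing API defined above (illustrative only; the message bytes are arbitrary):

```
from nacl.signing import SigningKey

# Generate a fresh Ed25519 key pair from a random 32-byte seed.
signing_key = SigningKey.generate()
verify_key = signing_key.verify_key

signed = signing_key.sign(b"attack at dawn")

# verify() returns the message on success and raises BadSignatureError if
# either the message or the signature has been tampered with.
assert verify_key.verify(signed) == b"attack at dawn"
assert verify_key.verify(signed.message, signed.signature) == b"attack at dawn"
```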
+ + +import os +from typing import SupportsBytes, Type, TypeVar + +import nacl.bindings +from nacl import encoding + +_EncryptedMessage = TypeVar("_EncryptedMessage", bound="EncryptedMessage") + + +class EncryptedMessage(bytes): + """ + A bytes subclass that holds a messaged that has been encrypted by a + :class:`SecretBox`. + """ + + _nonce: bytes + _ciphertext: bytes + + @classmethod + def _from_parts( + cls: Type[_EncryptedMessage], + nonce: bytes, + ciphertext: bytes, + combined: bytes, + ) -> _EncryptedMessage: + obj = cls(combined) + obj._nonce = nonce + obj._ciphertext = ciphertext + return obj + + @property + def nonce(self) -> bytes: + """ + The nonce used during the encryption of the :class:`EncryptedMessage`. + """ + return self._nonce + + @property + def ciphertext(self) -> bytes: + """ + The ciphertext contained within the :class:`EncryptedMessage`. + """ + return self._ciphertext + + +class StringFixer: + def __str__(self: SupportsBytes) -> str: + return str(self.__bytes__()) + + +def bytes_as_string(bytes_in: bytes) -> str: + return bytes_in.decode("ascii") + + +def random(size: int = 32) -> bytes: + return os.urandom(size) + + +def randombytes_deterministic( + size: int, seed: bytes, encoder: encoding.Encoder = encoding.RawEncoder +) -> bytes: + """ + Returns ``size`` number of deterministically generated pseudorandom bytes + from a seed + + :param size: int + :param seed: bytes + :param encoder: The encoder class used to encode the produced bytes + :rtype: bytes + """ + raw_data = nacl.bindings.randombytes_buf_deterministic(size, seed) + + return encoder.encode(raw_data) diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/METADATA b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/METADATA new file mode 100644 index 0000000..95abd3f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/METADATA @@ -0,0 +1,192 @@ +Metadata-Version: 2.4 +Name: oracledb +Version: 3.4.2 +Summary: Python interface to Oracle Database +Author-email: Anthony Tuininga +License-Expression: UPL-1.0 OR Apache-2.0 +Project-URL: Homepage, https://oracle.github.io/python-oracledb +Project-URL: Installation, https://python-oracledb.readthedocs.io/en/latest/user_guide/installation.html +Project-URL: Samples, https://github.com/oracle/python-oracledb/tree/main/samples +Project-URL: Documentation, http://python-oracledb.readthedocs.io +Project-URL: Release Notes, https://python-oracledb.readthedocs.io/en/latest/release_notes.html +Project-URL: Issues, https://github.com/oracle/python-oracledb/issues +Project-URL: Source, https://github.com/oracle/python-oracledb +Keywords: Oracle,database +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: 
Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Cython +Classifier: Topic :: Database +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-File: LICENSE.txt +License-File: THIRD_PARTY_LICENSES.txt +License-File: NOTICE.txt +Requires-Dist: cryptography>=3.2.1 +Requires-Dist: typing_extensions>=4.14.0 +Provides-Extra: test +Requires-Dist: anyio; extra == "test" +Requires-Dist: numpy; extra == "test" +Requires-Dist: pandas; extra == "test" +Requires-Dist: pyarrow; extra == "test" +Requires-Dist: pytest; extra == "test" +Requires-Dist: tox; extra == "test" +Provides-Extra: oci-config +Requires-Dist: oci; extra == "oci-config" +Provides-Extra: oci-auth +Requires-Dist: oci; extra == "oci-auth" +Provides-Extra: azure-config +Requires-Dist: azure-appconfiguration; extra == "azure-config" +Requires-Dist: azure-identity; extra == "azure-config" +Requires-Dist: azure-keyvault-secrets; extra == "azure-config" +Provides-Extra: azure-auth +Requires-Dist: msal; extra == "azure-auth" +Dynamic: license-file + +# python-oracledb + +The python-oracledb driver is the widely used, open-source [Python][python] +extension module allowing Python programs to connect directly to [Oracle +Database][oracledb] with no extra libraries needed. The module is built with +Cython for safety and speed. It is lightweight and high-performance. It is +stable, well tested, and has comprehensive [documentation][documentation]. The +module is maintained by Oracle. + +The module conforms to the [Python Database API 2.0 specification][pep249] with +a considerable number of additions and a couple of minor exclusions, see the +[feature list][features]. It is used by many Python frameworks, SQL generators, +ORMs, and libraries. + +Python-oracledb has a rich feature set which is easy to use. It gives you +control over SQL and PL/SQL statement execution; for working with data frames; +for fast data ingestion; for calling NoSQL-style document APIs; for message +queueing; for receiving database notifications; and for starting and stopping +the database. It also has high availability and security features. Synchronous +and [concurrent][concurrent] coding styles are supported. Database operations +can optionally be [pipelined][pipelining]. + +Python-oracledb is the successor to the now obsolete cx_Oracle driver. + +## Python-oracledb Installation + +Run: + +``` +python -m pip install oracledb --upgrade +``` + +See [python-oracledb Installation][installation] for details. + +## Samples + +Examples can be found in the [/samples][samples] directory and the +[Python and Oracle Database Tutorial][tutorial]. + +A basic example: + +``` +import oracledb +import getpass + +un = "scott" # Sample database username +cs = "localhost/orclpdb" # Sample database connection string +# cs = "localhost/freepdb1" # For Oracle Database Free users +# cs = "localhost/orclpdb1" # Some databases may have this service +pw = getpass.getpass(f"Enter password for {un}@{cs}: ") + +with oracledb.connect(user=un, password=pw, dsn=cs) as connection: + with connection.cursor() as cursor: + sql = "select sysdate from dual" + for r in cursor.execute(sql): + print(r) +``` + +## Dependencies and Interoperability + +- Python versions 3.9 through 3.14. + + Pre-built packages are available on [PyPI][pypi] and other repositories. + + Source code is also available. + + Previous versions of python-oracledb supported older Python versions. + +- Oracle Client libraries are *optional*. 
+ + **Thin mode**: By default python-oracledb runs in a 'Thin' mode which + connects directly to Oracle Database. + + **Thick mode**: Some advanced Oracle Database functionality is currently only + available when optional Oracle Client libraries are loaded by + python-oracledb. Libraries are available in the free [Oracle Instant + Client][instantclient] packages. Python-oracledb can use Oracle Client + libraries versions 11.2 through 23, inclusive. + +- Oracle Database + + **Thin mode**: Oracle Database 12.1 (or later) is required. + + **Thick mode**: Oracle Database 9.2 (or later) is required, depending on the + Oracle Client library version. Oracle Database's standard client-server + version interoperability allows connection to both older and newer + databases. For example when python-oracledb uses Oracle Client 19 libraries, + then it can connect to Oracle Database 11.2 or later. + +## Documentation + +See the [python-oracledb Documentation][documentation] and [Release +Notes][relnotes]. + +## Help + +Questions can be asked in [GitHub Discussions][ghdiscussions]. + +Problem reports can be raised in [GitHub Issues][ghissues]. + +## Tests + +See [/tests][tests] + +## Contributing + +This project welcomes contributions from the community. Before submitting a +pull request, please [review our contribution guide](./CONTRIBUTING.md). + +## Security + +Please consult the [security guide](./SECURITY.md) for our responsible security +vulnerability disclosure process. + +## License + +See [LICENSE][license], [THIRD_PARTY_LICENSES][tplicense], and +[NOTICE][notice]. + +[python]: https://www.python.org/ +[oracledb]: https://www.oracle.com/database/ +[instantclient]: https://www.oracle.com/database/technologies/instant-client.html +[pep249]: https://peps.python.org/pep-0249/ +[documentation]: http://python-oracledb.readthedocs.io +[relnotes]: https://python-oracledb.readthedocs.io/en/latest/release_notes.html +[license]: https://github.com/oracle/python-oracledb/blob/main/LICENSE.txt +[tplicense]: https://github.com/oracle/python-oracledb/blob/main/THIRD_PARTY_LICENSES.txt +[notice]: https://github.com/oracle/python-oracledb/blob/main/NOTICE.txt +[tutorial]: https://oracle.github.io/python-oracledb/samples/tutorial/Python-and-Oracle-Database-The-New-Wave-of-Scripting.html +[ghdiscussions]: https://github.com/oracle/python-oracledb/discussions +[ghissues]: https://github.com/oracle/python-oracledb/issues +[tests]: https://github.com/oracle/python-oracledb/tree/main/tests +[samples]: https://github.com/oracle/python-oracledb/tree/main/samples +[installation]: https://python-oracledb.readthedocs.io/en/latest/user_guide/installation.html +[features]: https://oracle.github.io/python-oracledb/#features +[concurrent]: https://python-oracledb.readthedocs.io/en/latest/user_guide/asyncio.html +[pipelining]: https://python-oracledb.readthedocs.io/en/latest/user_guide/asyncio.html#pipelining-database-operations +[pypi]: https://pypi.org/project/oracledb diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/RECORD b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/RECORD new file mode 100644 index 0000000..656a266 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/RECORD @@ -0,0 +1,82 @@ +oracledb-3.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +oracledb-3.4.2.dist-info/METADATA,sha256=7xw4voQi3iPadEA1hCkzFUQEf3CAsfGgcHl-rN1wKVA,7745 +oracledb-3.4.2.dist-info/RECORD,, 
+oracledb-3.4.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +oracledb-3.4.2.dist-info/WHEEL,sha256=g-COJ2gRDHtuaAOZ38OK9x2S6MgIOU11pjHgMrTxZc8,185 +oracledb-3.4.2.dist-info/licenses/LICENSE.txt,sha256=fJ3BoHrji46pUVPUiKNUW61ogSiZUUyTKWbOgdgbuJk,12570 +oracledb-3.4.2.dist-info/licenses/NOTICE.txt,sha256=szda2oDB3YWAo803AoM8RZ54qQpHlDt6rUgCQ6PA33c,56 +oracledb-3.4.2.dist-info/licenses/THIRD_PARTY_LICENSES.txt,sha256=4HEKfr0_uq9-7ogpsW4rvhT84jX1BJYnmMrn62GfS8g,90513 +oracledb-3.4.2.dist-info/top_level.txt,sha256=DGlJh0bBtAUvhjHF-pPnrVCzNhW87yzaKB_1_FP28uU,9 +oracledb/__init__.py,sha256=0SFf-H-1wfD1dNTWa9N6JbHeb4PRTK7o9AzG5cbxpzg,39131 +oracledb/__pycache__/__init__.cpython-39.pyc,, +oracledb/__pycache__/aq.cpython-39.pyc,, +oracledb/__pycache__/arrow_array.cpython-39.pyc,, +oracledb/__pycache__/base.cpython-39.pyc,, +oracledb/__pycache__/builtin_hooks.cpython-39.pyc,, +oracledb/__pycache__/connect_params.cpython-39.pyc,, +oracledb/__pycache__/connection.cpython-39.pyc,, +oracledb/__pycache__/constants.cpython-39.pyc,, +oracledb/__pycache__/constructors.cpython-39.pyc,, +oracledb/__pycache__/cursor.cpython-39.pyc,, +oracledb/__pycache__/dataframe.cpython-39.pyc,, +oracledb/__pycache__/dbobject.cpython-39.pyc,, +oracledb/__pycache__/defaults.cpython-39.pyc,, +oracledb/__pycache__/driver_mode.cpython-39.pyc,, +oracledb/__pycache__/dsn.cpython-39.pyc,, +oracledb/__pycache__/enums.cpython-39.pyc,, +oracledb/__pycache__/errors.cpython-39.pyc,, +oracledb/__pycache__/exceptions.cpython-39.pyc,, +oracledb/__pycache__/fetch_info.cpython-39.pyc,, +oracledb/__pycache__/future.cpython-39.pyc,, +oracledb/__pycache__/lob.cpython-39.pyc,, +oracledb/__pycache__/pipeline.cpython-39.pyc,, +oracledb/__pycache__/pool.cpython-39.pyc,, +oracledb/__pycache__/pool_params.cpython-39.pyc,, +oracledb/__pycache__/soda.cpython-39.pyc,, +oracledb/__pycache__/sparse_vector.cpython-39.pyc,, +oracledb/__pycache__/subscr.cpython-39.pyc,, +oracledb/__pycache__/utils.cpython-39.pyc,, +oracledb/__pycache__/var.cpython-39.pyc,, +oracledb/__pycache__/version.cpython-39.pyc,, +oracledb/aq.py,sha256=NBAqRAmUN3jWf-7O7MJo_aiFcOITW8wPRbMKFbAndpU,24248 +oracledb/arrow_array.py,sha256=aReIFgQVcQs7TuqpDNL5YBESul8Jp3OA6JyNJV_JSDk,3407 +oracledb/arrow_impl.cpython-39-x86_64-linux-gnu.so,sha256=ziYZuV9_bWkQUxnNGmPk9VChJbSaR9xIexAEdXUFWrY,268432 +oracledb/base.py,sha256=CCPbBrJyI63yiz3K0r7VnaVmGE7wlwBVqW99N60qilg,2008 +oracledb/base_impl.cpython-39-x86_64-linux-gnu.so,sha256=XDXiNNNOgvFjXrpc7EIYW_9klMZvT-g-ToJHBe_eT2g,2230448 +oracledb/builtin_hooks.py,sha256=fb5w5Lxp4WNIqfIOnx4H2e6x4xbJMN8p7jGlYF2JdEQ,3633 +oracledb/connect_params.py,sha256=HaGHntHMMk7d7Pq_JwPDwy_czrA6_rjAoMw_hAP2v7I,51792 +oracledb/connection.py,sha256=sVcXxeaHScMyn5iVW2XvELrOg0ftEi4dmWAELnxqvyA,135995 +oracledb/constants.py,sha256=eCKivf3BowzFSr5Oaspab08W2PI9JN-P8wlV1NN9sfY,3193 +oracledb/constructors.py,sha256=o4mVcdIoKnLDL1CIBgyN9W4MjhKx3XVXNqf2b5Hjt4o,3402 +oracledb/cursor.py,sha256=J6IwFUVlknBqPWUrx9NtHlNHHxl1zMzCGWBf2SP8eNw,57994 +oracledb/dataframe.py,sha256=aEAH_j6GkRnPPL9QA-ZwiGxkq9R0K0NGfe4hKrOJt4k,4278 +oracledb/dbobject.py,sha256=5eFET9r8TctGXnZPPpJuWsGOQxB6xIQ81vFK-SCP6Yw,13297 +oracledb/defaults.py,sha256=nPxq9fKQVVgDCHxBmem9ZO67SQu_woJOX8z20z1H47g,12456 +oracledb/driver_mode.py,sha256=KGqjzs9g17mb6xjQIbYvc7MSc7QndvN_FzMIVv2gLPE,5502 +oracledb/dsn.py,sha256=KZnIaf9LwiWZy4l5u7ZxpR0DWXY_1r-iaTH1xktqFhE,3231 +oracledb/enums.py,sha256=BC6pzX3o0NpFQTFoDWpkY6UbB_9oQNGCiiLNjk85aZk,2827 
+oracledb/errors.py,sha256=R0xB__9DmuDDSBx6il8CX9J4zGZLv1t13i57YVVBZi0,39858 +oracledb/exceptions.py,sha256=qUtkmFsByCHW2DsmI6V7cpoBTij5qZG7f0VtaiKlbJ0,4006 +oracledb/fetch_info.py,sha256=NfiwCs6ekaorsMj9QjC55xeE7ZRkGVjGC28YhPaLxNQ,11317 +oracledb/future.py,sha256=prbDZef6NSvfSZBk2bIQSwgTMmsJDdOzmyLaeAub5qs,1721 +oracledb/lob.py,sha256=RvjHeiFClmLLFVhp4YOG7Khd9yqc6ikxZEfbKwxN1Dk,11758 +oracledb/pipeline.py,sha256=UM1FzkRq9_ce9s2vVGvz_qjMLNmawL-7KZn279hwmJs,17926 +oracledb/plugins/__pycache__/azure_config_provider.cpython-39.pyc,, +oracledb/plugins/__pycache__/azure_tokens.cpython-39.pyc,, +oracledb/plugins/__pycache__/oci_config_provider.cpython-39.pyc,, +oracledb/plugins/__pycache__/oci_tokens.cpython-39.pyc,, +oracledb/plugins/azure_config_provider.py,sha256=e_YBwPakMkAm1wI0KMlRJB5sTgIHvaLXmi-kDBq1Tjg,9764 +oracledb/plugins/azure_tokens.py,sha256=DfjmBbbaO7Ux_z85Tg2TEVVHsF_I1ZPHmUx_GYhNjEE,2927 +oracledb/plugins/oci_config_provider.py,sha256=G8aUihhoTjqe6u9NksA55rzgw1GbfGGYaoCgDlMWv_U,8985 +oracledb/plugins/oci_tokens.py,sha256=ura-NJggYZpWTh-cdhoix0fRGdzpNbKTbhd6KkN1lo0,5825 +oracledb/pool.py,sha256=fm-POk9TGXb1-wKp9jCWbdix6vK4AO5Xi_C6mDj6hrY,70244 +oracledb/pool_params.py,sha256=z6jXBqsAJPbIInaUpPL8SfUKqiGZHw9FzvhTNjoyAqM,44767 +oracledb/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +oracledb/soda.py,sha256=VtFhWcJoNp7YaU1De6t7n5WIArMz_DlU0PvYgLDi2_c,30637 +oracledb/sparse_vector.py,sha256=4gq92s54GKs56iZZIVZKATPtbiYqd9a-ERn9yqq2Myw,3829 +oracledb/subscr.py,sha256=GYvneTAQZGKKj4-cxa4IoakO1-7xzoYKZNZ7e2JDrJ4,12456 +oracledb/thick_impl.cpython-39-x86_64-linux-gnu.so,sha256=28C8r4aVRvU1-THesKDicA4Wrv7YEA9TDJ-Bxni7PSU,1313792 +oracledb/thin_impl.cpython-39-x86_64-linux-gnu.so,sha256=lXcNpvATq1aPFW1BHaF2H2Fu2kyO2E-yP3fyjg38l7A,3081936 +oracledb/utils.py,sha256=iu16r7585AnDT65dJAGYuBv3qQ1j-Z-7gobQiKfaFCU,17524 +oracledb/var.py,sha256=S0aMvacvBLJ6gFEI0X5LMLvzUXh65xzA3tH_Ga2tvqs,6931 +oracledb/version.py,sha256=b94vCFvLo8YLp64g42AVVo0Z71r6-ch8jeCff2sOBa0,1533 diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/REQUESTED b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/WHEEL new file mode 100644 index 0000000..c28edf9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/WHEEL @@ -0,0 +1,7 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.10.2) +Root-Is-Purelib: false +Tag: cp39-cp39-manylinux_2_17_x86_64 +Tag: cp39-cp39-manylinux2014_x86_64 +Tag: cp39-cp39-manylinux_2_28_x86_64 + diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/LICENSE.txt b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/LICENSE.txt new file mode 100644 index 0000000..38fd148 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/LICENSE.txt @@ -0,0 +1,229 @@ +Copyright (c) 2016, 2025 Oracle and/or its affiliates. + +This software is dual-licensed to you under the Universal Permissive License +(UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +either license. 
+ +If you elect to accept the software under the Apache License, Version 2.0, +the following applies: + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +The Universal Permissive License (UPL), Version 1.0 +=================================================== + +Subject to the condition set forth below, permission is hereby granted to any +person obtaining a copy of this software, associated documentation and/or data +(collectively the "Software"), free of charge and under any and all copyright +rights in the Software, and any and all patent rights owned or freely +licensable by each licensor hereunder covering either (i) the unmodified +Software as contributed to or provided by such licensor, or (ii) the Larger +Works (as defined below), to deal in both + +(a) the Software, and + +(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if + one is included with the Software (each a "Larger Work" to which the + Software is contributed by such licensors), + +without restriction, including without limitation the rights to copy, create +derivative works of, display, perform, and distribute the Software and make, +use, sell, offer for sale, import, export, have made, and have sold the +Software and the Larger Work(s), and to sublicense the foregoing rights on +either these or other terms. + +This license is subject to the following condition: + +The above copyright notice and either this complete permission notice or at a +minimum a reference to the UPL must be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +Apache License +============== + +Version 2.0, January 2004 + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. **Definitions**. + + "License" shall mean the terms and conditions for use, reproduction, and + distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by the + copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all other + entities that control, are controlled by, or are under common control with + that entity. For the purposes of this definition, "control" means (i) the + power, direct or indirect, to cause the direction or management of such + entity, whether by contract or otherwise, or (ii) ownership of fifty + percent (50%) or more of the outstanding shares, or (iii) beneficial + ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity exercising + permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation source, + and configuration files. + + "Object" form shall mean any form resulting from mechanical transformation + or translation of a Source form, including but not limited to compiled + object code, generated documentation, and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or Object form, + made available under the License, as indicated by a copyright notice that + is included in or attached to the work (an example is provided in the + Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object form, + that is based on (or derived from) the Work and for which the editorial + revisions, annotations, elaborations, or other modifications represent, as + a whole, an original work of authorship. For the purposes of this License, + Derivative Works shall not include works that remain separable from, or + merely link (or bind by name) to the interfaces of, the Work and Derivative + Works thereof. + + "Contribution" shall mean any work of authorship, including the original + version of the Work and any modifications or additions to that Work or + Derivative Works thereof, that is intentionally submitted to Licensor for + inclusion in the Work by the copyright owner or by an individual or Legal + Entity authorized to submit on behalf of the copyright owner. For the + purposes of this definition, "submitted" means any form of electronic, + verbal, or written communication sent to the Licensor or its + representatives, including but not limited to communication on electronic + mailing lists, source code control systems, and issue tracking systems that + are managed by, or on behalf of, the Licensor for the purpose of discussing + and improving the Work, but excluding communication that is conspicuously + marked or otherwise designated in writing by the copyright owner as "Not a + Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity on + behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. **Grant of Copyright License.** Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable copyright license to + reproduce, prepare Derivative Works of, publicly display, publicly perform, + sublicense, and distribute the Work and such Derivative Works in Source or + Object form. + +3. **Grant of Patent License.** Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable (except as stated in + this section) patent license to make, have made, use, offer to sell, sell, + import, and otherwise transfer the Work, where such license applies only to + those patent claims licensable by such Contributor that are necessarily + infringed by their Contribution(s) alone or by combination of their + Contribution(s) with the Work to which such Contribution(s) was submitted. 
+ If You institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work or a + Contribution incorporated within the Work constitutes direct or + contributory patent infringement, then any patent licenses granted to You + under this License for that Work shall terminate as of the date such + litigation is filed. + +4. **Redistribution.** You may reproduce and distribute copies of the Work or + Derivative Works thereof in any medium, with or without modifications, and + in Source or Object form, provided that You meet the following conditions: + + 1. You must give any other recipients of the Work or Derivative Works a + copy of this License; and + + 2. You must cause any modified files to carry prominent notices stating + that You changed the files; and + + 3. You must retain, in the Source form of any Derivative Works that You + distribute, all copyright, patent, trademark, and attribution notices + from the Source form of the Work, excluding those notices that do not + pertain to any part of the Derivative Works; and + + 4. If the Work includes a "NOTICE" text file as part of its distribution, + then any Derivative Works that You distribute must include a readable + copy of the attribution notices contained within such NOTICE file, + excluding those notices that do not pertain to any part of the + Derivative Works, in at least one of the following places: within a + NOTICE text file distributed as part of the Derivative Works; within + the Source form or documentation, if provided along with the Derivative + Works; or, within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents of the + NOTICE file are for informational purposes only and do not modify the + License. You may add Your own attribution notices within Derivative + Works that You distribute, alongside or as an addendum to the NOTICE + text from the Work, provided that such additional attribution notices + cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may + provide additional or different license terms and conditions for use, + reproduction, or distribution of Your modifications, or for any such + Derivative Works as a whole, provided Your use, reproduction, and + distribution of the Work otherwise complies with the conditions stated + in this License. + +5. **Submission of Contributions.** Unless You explicitly state otherwise, any + Contribution intentionally submitted for inclusion in the Work by You to + the Licensor shall be under the terms and conditions of this License, + without any additional terms or conditions. Notwithstanding the above, + nothing herein shall supersede or modify the terms of any separate license + agreement you may have executed with Licensor regarding such Contributions. + +6. **Trademarks.** This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, except + as required for reasonable and customary use in describing the origin of + the Work and reproducing the content of the NOTICE file. + +7. 
**Disclaimer of Warranty.** Unless required by applicable law or agreed to + in writing, Licensor provides the Work (and each Contributor provides its + Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied, including, without limitation, any + warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or + FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for + determining the appropriateness of using or redistributing the Work and + assume any risks associated with Your exercise of permissions under this + License. + +8. **Limitation of Liability.** In no event and under no legal theory, whether + in tort (including negligence), contract, or otherwise, unless required by + applicable law (such as deliberate and grossly negligent acts) or agreed to + in writing, shall any Contributor be liable to You for damages, including + any direct, indirect, special, incidental, or consequential damages of any + character arising as a result of this License or out of the use or + inability to use the Work (including but not limited to damages for loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor has been + advised of the possibility of such damages. + +9. **Accepting Warranty or Additional Liability.** While redistributing the + Work or Derivative Works thereof, You may choose to offer, and charge a fee + for, acceptance of support, warranty, indemnity, or other liability + obligations and/or rights consistent with this License. However, in + accepting such obligations, You may act only on Your own behalf and on Your + sole responsibility, not on behalf of any other Contributor, and only if + You agree to indemnify, defend, and hold each Contributor harmless for any + liability incurred by, or claims asserted against, such Contributor by + reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/NOTICE.txt b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/NOTICE.txt new file mode 100644 index 0000000..4cd6fb0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/NOTICE.txt @@ -0,0 +1 @@ +Copyright (c) 2016, 2025, Oracle and/or its affiliates. diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/THIRD_PARTY_LICENSES.txt b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/THIRD_PARTY_LICENSES.txt new file mode 100644 index 0000000..0f8f1a9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/licenses/THIRD_PARTY_LICENSES.txt @@ -0,0 +1,1459 @@ +------------------------------------------------------------------------------- +------------------------------------------------------------------------------- + +Cython + +The following attribution text was taken from Component Cython Version 0.24.1 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +------------------------------------------------------------------------------- +------------------------------------------------------------------------------- + +Cryptography + +LICENSE: https://github.com/pyca/cryptography/blob/3.4.x/LICENSE +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. +The code used in the OS random engine is derived from CPython, and is licensed +under the terms of the PSF License Agreement. +___________________________________________________________________________ + +LICENSE.APACHE: https://github.com/pyca/cryptography/blob/3.4.x/LICENSE.APACHE + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + 1. Definitions. + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + END OF TERMS AND CONDITIONS + APPENDIX: How to apply the Apache License to your work. + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + Copyright [yyyy] [name of copyright owner] + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + https://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+___________________________________________________________________________ + +LICENSE.BSD: https://github.com/pyca/cryptography/blob/3.4.x/LICENSE.BSD + +Copyright (c) Individual contributors. +All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. Neither the name of PyCA Cryptography nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +============================================================================================= + +Fourth-party dependencies: + +six LICENSE - MIT: https://github.com/benjaminp/six/blob/1.15.0/LICENSE + +Copyright (c) 2010-2020 Benjamin Peterson +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+___________________________________________________________________________________________ + +cffi LICENSE - MIT: https://github.com/python-cffi/release-doc/blob/master/LICENSE + +MIT License +Copyright (c) 2020 CFFI, Python's C Foreign Function Interface +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +___________________________________________________________________________________________ + +pycparser LICENSE: https://github.com/eliben/pycparser/blob/master/LICENSE + +pycparser -- A C parser in Python +Copyright (c) 2008-2017, Eli Bendersky +All rights reserved. +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Eli Bendersky nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------------------------------------------------------------------------------- +------------------------------------------------------------------------------- + +Apache Arrow nanoarrow +Copyright 2023 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +---------- + + + +Apache nanoarrow 0.6.0 + + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------- + +Copyright 2015-2023 Mikkel F. 
Jørgensen, dvide.com +Copyright (c) 2016 Mikkel Fahnøe Jørgensen, dvide.com +Copyright (c) 2005-2016 Paul Hsieh +Copyright (c) 2024 Mikkel Fahnøe Jørgensen, dvide.com + +---------- + +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. + +------------------------------------------------------------------------------- +------------------------------------------------------------------------------- + +Microsoft Authentication Library (MSAL) for Python + +The MIT License (MIT) + +Copyright (c) Microsoft Corporation. +All rights reserved. + +This code is licensed under the MIT License. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files(the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions : + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +----------------------------------------------------------------------------------------------------------------------------- +cryptography + +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. + + -- Apache License + + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +--BSD License + +Copyright (c) Individual contributors. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of PyCA Cryptography nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +----------------------------------------------------------------------------------------------------------------------------- + +cffi + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + The MIT License + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +----------------------------------------------------------------------------------------------------------------------------- +pycparser +pycparser -- A C parser in Python + +Copyright (c) 2008-2022, Eli Bendersky +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of the copyright holder nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +----------------------------------------------------------------------------------------------------------------------------- + +PyJWT + +The MIT License (MIT) + +Copyright (c) 2015-2022 José Padilla + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +----------------------------------------------------------------------------------------------------------------------------- + +certifi + +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. 
+ +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ + + +Mozilla Public License
Version 2.0 +1. Definitions +1.1. “Contributor” +means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. +1.2. “Contributor Version” +means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor’s Contribution. +1.3. “Contribution” +means Covered Software of a particular Contributor. +1.4. “Covered Software” +means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. +1.5. “Incompatible With Secondary Licenses” +means +* that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or +* that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. +1.6. “Executable Form” +means any form of the work other than Source Code Form. +1.7. “Larger Work” +means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. +1.8. “License” +means this document. +1.9. “Licensable” +means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. +1.10. “Modifications” +means any of the following: +* any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or +* any new file in Source Code Form that contains any Covered Software. +1.11. “Patent Claims” of a Contributor +means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. +1.12. “Secondary License” +means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. +1.13. “Source Code Form” +means the form of the work preferred for making modifications. +1.14. “You” (or “Your”) +means an individual or a legal entity exercising rights under this License. For legal entities, “You” includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, “control” means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. +2. License Grants and Conditions +2.1. Grants +Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: +* under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and +* under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. +2.2. 
Effective Date +The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. +2.3. Limitations on Grant Scope +The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: +* for any code that a Contributor has removed from Covered Software; or +* for infringements caused by: (i) Your and any other third party’s modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or +* under Patent Claims infringed by Covered Software in the absence of its Contributions. +This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). +2.4. Subsequent Licenses +No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). +2.5. Representation +Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. +2.6. Fair Use +This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. +2.7. Conditions +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. +3. Responsibilities +3.1. Distribution of Source Form +All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients’ rights in the Source Code Form. +3.2. Distribution of Executable Form +If You distribute Covered Software in Executable Form then: +* such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and +* You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients’ rights in the Source Code Form under this License. +3.3. Distribution of a Larger Work +You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. 
If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). +3.4. Notices +You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. +3.5. Application of Additional Terms +You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. +4. Inability to Comply Due to Statute or Regulation +If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. +5. Termination +5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. +5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. +5.3. 
In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. +6. Disclaimer of Warranty +Covered Software is provided under this License on an “as is” basis, without warranty of any kind, either expressed, implied, or statutory, including, without limitation, warranties that the Covered Software is free of defects, merchantable, fit for a particular purpose or non-infringing. The entire risk as to the quality and performance of the Covered Software is with You. Should any Covered Software prove defective in any respect, You (not any Contributor) assume the cost of any necessary servicing, repair, or correction. This disclaimer of warranty constitutes an essential part of this License. No use of any Covered Software is authorized under this License except under this disclaimer. +7. Limitation of Liability +Under no circumstances and under no legal theory, whether tort (including negligence), contract, or otherwise, shall any Contributor, or anyone who distributes Covered Software as permitted above, be liable to You for any direct, indirect, special, incidental, or consequential damages of any character including, without limitation, damages for lost profits, loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses, even if such party shall have been informed of the possibility of such damages. This limitation of liability shall not apply to liability for death or personal injury resulting from such party’s negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You. +8. Litigation +Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party’s ability to bring cross-claims or counter-claims. +9. Miscellaneous +This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. +10. Versions of the License +10.1. New Versions +Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. +10.2. Effect of New Versions +You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. +10.3. 
Modified Versions +If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). +10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses +If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. +Exhibit A - Source Code Form License Notice +This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at https://mozilla.org/MPL/2.0/. +If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. +You may add additional accurate notices of copyright ownership. +Exhibit B - “Incompatible With Secondary Licenses” Notice +This Source Code Form is “Incompatible With Secondary Licenses”, as defined by the Mozilla Public License, v. 2.0. + + +----------------------------------------------------------------------------------------------------------------------------- + +charset-normalizer + +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +----------------------------------------------------------------------------------------------------------------------------- +idna +BSD 3-Clause License + +Copyright (c) 2013-2024, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +--------------------------------------------------------- +urllib + +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +----------------------------------------------------------------------------------------------------------------------------- + +requests + +Copyright 2019 Kenneth Reitz + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +------------------------------------------------------------------------------- +------------------------------------------------------------------------------- diff --git a/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/top_level.txt new file mode 100644 index 0000000..2b2f793 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb-3.4.2.dist-info/top_level.txt @@ -0,0 +1 @@ +oracledb diff --git a/.venv/lib/python3.9/site-packages/oracledb/__init__.py b/.venv/lib/python3.9/site-packages/oracledb/__init__.py new file mode 100644 index 0000000..7e11e7f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/__init__.py @@ -0,0 +1,1275 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# __init__.py +# +# Package initialization module. 
+# ----------------------------------------------------------------------------- + +import collections +import sys +import warnings + +if sys.version_info[:2] < (3, 9): + message = ( + f"Python {sys.version_info[0]}.{sys.version_info[1]} is no longer " + "supported by the Python core team. Therefore, support for it is " + "deprecated in python-oracledb and will be removed in a future release" + ) + warnings.warn(message) + +from . import base_impl, thick_impl, thin_impl + +from .base_impl import ( + ApiType as ApiType, + DbType as DbType, +) + +from .enums import ( + AuthMode as AuthMode, + PipelineOpType as PipelineOpType, + PoolGetMode as PoolGetMode, + Purity as Purity, + VectorFormat as VectorFormat, +) + +from . import constants, version + +from .arrow_array import ( + ArrowArray as ArrowArray, +) + +from .constructors import ( + Binary as Binary, + Date as Date, + DateFromTicks as DateFromTicks, + Time as Time, + TimeFromTicks as TimeFromTicks, + Timestamp as Timestamp, + TimestampFromTicks as TimestampFromTicks, +) + +from .dataframe import ( + DataFrame as DataFrame, +) + +from .dbobject import ( + DbObject as DbObject, + DbObjectAttr as DbObjectAttr, + DbObjectType as DbObjectType, +) + +from .defaults import ( + Defaults as Defaults, +) + +from .driver_mode import ( + is_thin_mode as is_thin_mode, +) + +from .dsn import ( + makedsn as makedsn, +) + +from .errors import ( + _Error as _Error, +) + +from .exceptions import ( + Warning as Warning, + Error as Error, + DatabaseError as DatabaseError, + DataError as DataError, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + ProgrammingError as ProgrammingError, +) + +from .fetch_info import ( + FetchInfo as FetchInfo, +) + +from .future import ( + __future__ as __future__, +) + +from .lob import ( + LOB as LOB, + AsyncLOB as AsyncLOB, +) + +from .pipeline import ( + Pipeline as Pipeline, + PipelineOp as PipelineOp, + PipelineOpResult as PipelineOpResult, + create_pipeline as create_pipeline, +) + +from .soda import ( + SodaDatabase as SodaDatabase, + SodaCollection as SodaCollection, + SodaDocument as SodaDocument, + SodaDocCursor as SodaDocCursor, + SodaOperation as SodaOperation, +) + +from .sparse_vector import ( + SparseVector as SparseVector, +) + +from .utils import ( + clientversion as clientversion, + enable_thin_mode as enable_thin_mode, + from_arrow as from_arrow, + init_oracle_client as init_oracle_client, + register_params_hook as register_params_hook, + register_password_type as register_password_type, + register_protocol as register_protocol, + unregister_params_hook as unregister_params_hook, +) + +from .var import ( + Var as Var, +) + + +# module attributes +apilevel: str = "2.0" +""" +A string constant stating the Python DB API level supported by python-oracledb. +""" + +defaults: Defaults = Defaults() +""" +The defaults object for setting default behaviors of python-oracledb. +""" + +paramstyle: str = "named" +""" +A string constant stating the type of parameter marker formatting expected by +the interface. Currently 'named' as in 'where name = :name'. +""" + +threadsafety: int = 2 +""" +An integer constant stating the level of thread safety that python-oracledb +supports. Currently 2, which means that threads may share the module and +connections, but not cursors. 
Sharing means that a thread may use a resource +without wrapping it using a mutex semaphore to implement resource locking. +""" + +__version__: str = version.__version__ +""" +A string constant stating the version of the module. +""" + + +# API types +BINARY: ApiType = base_impl.BINARY +""" +This type object is used to describe columns in a database that contain binary +data. The database types :data:`DB_TYPE_RAW` and :data:`DB_TYPE_LONG_RAW` will +compare equal to this value. If a variable is created with this type, the +database type :data:`DB_TYPE_RAW` will be used. +""" + +DATETIME: ApiType = base_impl.DATETIME +""" +This type object is used to describe columns in a database that are dates. The +database types :data:`DB_TYPE_DATE`, :data:`DB_TYPE_TIMESTAMP`, +:data:`DB_TYPE_TIMESTAMP_LTZ` and :data:`DB_TYPE_TIMESTAMP_TZ` will all compare +equal to this value. If a variable is created with this type, the database type +:data:`DB_TYPE_DATE` will be used. +""" + +NUMBER: ApiType = base_impl.NUMBER +""" +This type object is used to describe columns in a database that are numbers. +The database types :data:`DB_TYPE_BINARY_DOUBLE`, :data:`DB_TYPE_BINARY_FLOAT`, +:data:`DB_TYPE_BINARY_INTEGER` and :data:`DB_TYPE_NUMBER` will all compare +equal to this value. If a variable is created with this type, the database type +:data:`DB_TYPE_NUMBER` will be used. +""" + +ROWID: ApiType = base_impl.ROWID +""" +This type object is used to describe the pseudo column "rowid". The database +types :data:`DB_TYPE_ROWID` and :data:`DB_TYPE_UROWID` will compare equal to +this value. If a variable is created with this type, the database type +:data:`DB_TYPE_VARCHAR` will be used. +""" + +STRING: ApiType = base_impl.STRING +""" +This type object is used to describe columns in a database that are strings. +The database types :data:`DB_TYPE_CHAR`, :data:`DB_TYPE_LONG`, +:data:`DB_TYPE_NCHAR`, :data:`DB_TYPE_NVARCHAR` and :data:`DB_TYPE_VARCHAR` +will all compare equal to this value. If a variable is created with this type, +the database type :data:`DB_TYPE_VARCHAR` will be used. +""" + + +# connection authorization modes +AUTH_MODE_DEFAULT: AuthMode = AuthMode.DEFAULT +""" +This constant is used to specify that default authentication is to take place. +This is the default value if no mode is passed at all. + +It can be used for standalone and pooled connections in python-oracledb Thin +mode, and for standalone connections in Thick mode. + +This constant deprecates the ``DEFAULT_AUTH`` constant that was used in the +obsolete cx_Oracle driver, and was the default ``mode`` value. +""" + +AUTH_MODE_PRELIM: AuthMode = AuthMode.PRELIM +""" +This constant is used to specify that preliminary authentication is to be used. +This is needed for performing database startup and shutdown. + +It can only be used in python-oracledb Thick mode for standalone connections. + +This constant deprecates the ``PRELIM_AUTH`` constant that was used in the +obsolete cx_Oracle driver. +""" + +AUTH_MODE_SYSASM: AuthMode = AuthMode.SYSASM +""" +This constant is used to specify that SYSASM access is to be acquired. + +It can be used for standalone and pooled connections in python-oracledb Thin +mode, and for standalone connections in Thick mode. + +This constant deprecates the ``SYSASM`` constant that was used in the obsolete +cx_Oracle driver. +""" + +AUTH_MODE_SYSBKP: AuthMode = AuthMode.SYSBKP +""" +This constant is used to specify that SYSBACKUP access is to be acquired. 
+ +It can be used for standalone and pooled connections in python-oracledb Thin +mode, and for standalone connections in Thick mode. + +This constant deprecates the ``SYSBKP`` constant that was used in the +obsolete cx_Oracle driver. +""" + +AUTH_MODE_SYSDBA: AuthMode = AuthMode.SYSDBA +""" +This constant is used to specify that SYSDBA access is to be acquired. + +It can be used for standalone and pooled connections in python-oracledb Thin +mode, and for standalone connections in Thick mode. + +This constant deprecates the ``SYSDBA`` constant that was used in the obsolete +cx_Oracle driver. +""" + +AUTH_MODE_SYSDGD: AuthMode = AuthMode.SYSDGD +""" +This constant is used to specify that SYSDG access is to be acquired. + +It can be used for standalone and pooled connections in python-oracledb Thin +mode, and for standalone connections in Thick mode. + +This constant deprecates the ``SYSDGD`` constant that was used in the obsolete +cx_Oracle driver. +""" + +AUTH_MODE_SYSKMT: AuthMode = AuthMode.SYSKMT +""" +This constant is used to specify that SYSKM access is to be acquired. + +It can be used for standalone and pooled connections in python-oracledb Thin +mode, and for standalone connections in Thick mode. + +This constant deprecates the ``SYSKMT`` constant that was used in the obsolete +cx_Oracle driver. +""" + +AUTH_MODE_SYSOPER: AuthMode = AuthMode.SYSOPER +""" +This constant is used to specify that SYSOPER access is to be acquired. + +It can be used for standalone and pooled connections in python-oracledb Thin +mode, and for standalone connections in Thick mode. + +This constant deprecates the ``SYSOPER`` constant that was used in the obsolete +cx_Oracle driver. +""" + +AUTH_MODE_SYSRAC: AuthMode = AuthMode.SYSRAC +""" +This constant is used to specify that SYSRAC access is to be acquired. + +It can be used for standalone and pooled connections in python-oracledb Thin +mode, and for standalone connections in Thick mode. + +This constant deprecates the ``SYSRAC`` constant that was used in the obsolete +cx_Oracle driver. +""" + + +# database shutdown modes +DBSHUTDOWN_ABORT: int = constants.DBSHUTDOWN_ABORT +""" +This constant is used to specify that the caller should not wait for current +processing to complete or for users to disconnect from the database. This +should only be used in unusual circumstances since database recovery may be +necessary upon next startup. +""" + +DBSHUTDOWN_FINAL: int = constants.DBSHUTDOWN_FINAL +""" +This constant is used to specify that the instance can be truly halted. This +should only be done after the database has been shutdown with one of the other +modes (except abort) and the database has been closed and dismounted using the +appropriate SQL commands. +""" + +DBSHUTDOWN_IMMEDIATE: int = constants.DBSHUTDOWN_IMMEDIATE +""" +This constant is used to specify that all uncommitted transactions should be +rolled back and any connected users should be disconnected. +""" + +DBSHUTDOWN_TRANSACTIONAL: int = constants.DBSHUTDOWN_TRANSACTIONAL +""" +This constant is used to specify that further connections to the database +should be prohibited and no new transactions should be allowed. It then waits +for all active transactions to complete. +""" + +DBSHUTDOWN_TRANSACTIONAL_LOCAL: int = constants.DBSHUTDOWN_TRANSACTIONAL_LOCAL +""" +This constant is used to specify that further connections to the database +should be prohibited and no new transactions should be allowed. It then waits +for only local active transactions to complete. 
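+
+A minimal usage sketch (python-oracledb Thick mode is required for database
+shutdown; the credentials and DSN shown here are placeholders)::
+
+    conn = oracledb.connect(user="sys", password=pw, dsn=dsn,
+                            mode=oracledb.AUTH_MODE_SYSDBA)
+    conn.shutdown(mode=oracledb.DBSHUTDOWN_TRANSACTIONAL_LOCAL)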
+""" + + +# database types +DB_TYPE_BFILE: DbType = base_impl.DB_TYPE_BFILE +""" +Describes columns, attributes or array elements in a database that are of type +BFILE. It will compare equal to the DB API type :data:`BINARY`. +""" + +DB_TYPE_BINARY_DOUBLE: DbType = base_impl.DB_TYPE_BINARY_DOUBLE +""" +Describes columns, attributes or array elements in a database that are of type +BINARY_DOUBLE. It will compare equal to the DB API type :data:`NUMBER`. +""" + +DB_TYPE_BINARY_FLOAT: DbType = base_impl.DB_TYPE_BINARY_FLOAT +""" +Describes columns, attributes or array elements in a database that are of type +BINARY_FLOAT. It will compare equal to the DB API type :data:`NUMBER`. +""" + +DB_TYPE_BINARY_INTEGER: DbType = base_impl.DB_TYPE_BINARY_INTEGER +""" +Describes attributes or array elements in a database that are of type +BINARY_INTEGER. It will compare equal to the DB API type :data:`NUMBER`. +""" + +DB_TYPE_BLOB: DbType = base_impl.DB_TYPE_BLOB +""" +Describes columns, attributes or array elements in a database that are of type +BLOB. It will compare equal to the DB API type :data:`BINARY`. +""" + +DB_TYPE_BOOLEAN: DbType = base_impl.DB_TYPE_BOOLEAN +""" +Describes attributes or array elements in a database that are of type BOOLEAN. +It is only available in Oracle 12.1 and higher and only within PL/SQL. +""" + +DB_TYPE_CHAR: DbType = base_impl.DB_TYPE_CHAR +""" +Describes columns, attributes or array elements in a database that are of type +CHAR. It will compare equal to the DB API type :data:`STRING`. + +Note that these are fixed length string values and behave differently from +VARCHAR2. +""" + +DB_TYPE_CLOB: DbType = base_impl.DB_TYPE_CLOB +""" +Describes columns, attributes or array elements in a database that are of type +CLOB. It will compare equal to the DB API type :data:`STRING`. +""" + +DB_TYPE_CURSOR: DbType = base_impl.DB_TYPE_CURSOR +""" +Describes columns in a database that are of type CURSOR. In PL/SQL, these are +known as REF CURSOR. +""" + +DB_TYPE_DATE: DbType = base_impl.DB_TYPE_DATE +""" +Describes columns, attributes or array elements in a database that are of type +DATE. It will compare equal to the DB API type :data:`DATETIME`. +""" + +DB_TYPE_INTERVAL_DS: DbType = base_impl.DB_TYPE_INTERVAL_DS +""" +Describes columns, attributes or array elements in a database that are of type +INTERVAL DAY TO SECOND. +""" + +DB_TYPE_INTERVAL_YM: DbType = base_impl.DB_TYPE_INTERVAL_YM +""" +Describes columns, attributes or array elements in a database that are of type +INTERVAL YEAR TO MONTH. +""" + +DB_TYPE_JSON: DbType = base_impl.DB_TYPE_JSON +""" +Describes columns in a database that are of type JSON (with Oracle Database 21 +or later). +""" + +DB_TYPE_LONG: DbType = base_impl.DB_TYPE_LONG +""" +Describes columns, attributes or array elements in a database that are of type +LONG. It will compare equal to the DB API type :data:`STRING`. +""" + +DB_TYPE_LONG_NVARCHAR: DbType = base_impl.DB_TYPE_LONG_NVARCHAR +""" +This constant can be used in output type handlers when fetching NCLOB columns +as a string. (Note a type handler is not needed if +:data:`oracledb.defaults.fetch_lobs `, or the equivalent +execution parameter, is set to *False*). For IN binds, this constant can be +used to create a bind variable in :meth:`Cursor.var()` or via +:meth:`Cursor.setinputsizes()`. The ``DB_TYPE_LONG_NVARCHAR`` value won't be +shown in query metadata since it is not a database type. + +It will compare equal to the DB API type :data:`STRING`. 
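+
+As an illustration, an output type handler of the following form could be
+used to fetch NCLOB columns as strings (a sketch only; ``connection`` is
+assumed to be an existing connection)::
+
+    def output_type_handler(cursor, metadata):
+        if metadata.type_code is oracledb.DB_TYPE_NCLOB:
+            return cursor.var(
+                oracledb.DB_TYPE_LONG_NVARCHAR, arraysize=cursor.arraysize
+            )
+
+    connection.outputtypehandler = output_type_handler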
+""" + +DB_TYPE_LONG_RAW: DbType = base_impl.DB_TYPE_LONG_RAW +""" +Describes columns, attributes or array elements in a database that are of type +LONG RAW. It will compare equal to the DB API type :data:`BINARY`. +""" + +DB_TYPE_NCHAR: DbType = base_impl.DB_TYPE_NCHAR +""" +Describes columns, attributes or array elements in a database that are of type +NCHAR. It will compare equal to the DB API type :data:`STRING`. + +Note that these are fixed length string values and behave differently from +NVARCHAR2. +""" + +DB_TYPE_NCLOB: DbType = base_impl.DB_TYPE_NCLOB +""" +Describes columns, attributes or array elements in a database that are of type +NCLOB. It will compare equal to the DB API type :data:`STRING`. +""" + +DB_TYPE_NUMBER: DbType = base_impl.DB_TYPE_NUMBER +""" +Describes columns, attributes or array elements in a database that are of type +NUMBER. It will compare equal to the DB API type :data:`NUMBER`. +""" + +DB_TYPE_NVARCHAR: DbType = base_impl.DB_TYPE_NVARCHAR +""" +Describes columns, attributes or array elements in a database that are of type +NVARCHAR2. It will compare equal to the DB API type :data:`STRING`. +""" + +DB_TYPE_OBJECT: DbType = base_impl.DB_TYPE_OBJECT +""" +Describes columns, attributes or array elements in a database that are an +instance of a named SQL or PL/SQL type. +""" + +DB_TYPE_RAW: DbType = base_impl.DB_TYPE_RAW +""" +Describes columns, attributes or array elements in a database that are of type +RAW. It will compare equal to the DB API type :data:`BINARY`. +""" + +DB_TYPE_ROWID: DbType = base_impl.DB_TYPE_ROWID +""" +Describes columns, attributes or array elements in a database that are of type +ROWID or UROWID. It will compare equal to the DB API type :data:`ROWID`. +""" + +DB_TYPE_TIMESTAMP: DbType = base_impl.DB_TYPE_TIMESTAMP +""" +Describes columns, attributes or array elements in a database that are of type +TIMESTAMP. It will compare equal to the DB API type :data:`DATETIME`. +""" + +DB_TYPE_TIMESTAMP_LTZ: DbType = base_impl.DB_TYPE_TIMESTAMP_LTZ +""" +Describes columns, attributes or array elements in a database that are of type +TIMESTAMP WITH LOCAL TIME ZONE. It will compare equal to the DB API type +:data:`DATETIME`. +""" + +DB_TYPE_TIMESTAMP_TZ: DbType = base_impl.DB_TYPE_TIMESTAMP_TZ +""" +Describes columns, attributes or array elements in a database that are of type +TIMESTAMP WITH TIME ZONE. It will compare equal to the DB API type +:data:`DATETIME`. +""" + +DB_TYPE_UNKNOWN: DbType = base_impl.DB_TYPE_UNKNOWN +""" +Describes columns, attributes or array elements in a database that are of an +unknown type. +""" + +DB_TYPE_UROWID: DbType = base_impl.DB_TYPE_UROWID +""" +Describes columns, attributes or array elements in a database that are of type +UROWID. It will compare equal to the DB API type :data:`ROWID`. +""" + +DB_TYPE_VARCHAR: DbType = base_impl.DB_TYPE_VARCHAR +""" +Describes columns, attributes or array elements in a database that are of type +VARCHAR2. It will compare equal to the DB API type :data:`STRING`. +""" + +DB_TYPE_VECTOR: DbType = base_impl.DB_TYPE_VECTOR +""" +Describes columns, attributes or array elements in a database that are of type +VECTOR (with Oracle Database 23 or later). +""" + +DB_TYPE_XMLTYPE: DbType = base_impl.DB_TYPE_XMLTYPE +""" +Describes columns, attributes or array elements in a database that are of type +SYS.XMLTYPE. 
+""" + + +# AQ dequeue modes +DEQ_BROWSE: int = constants.DEQ_BROWSE +""" +This constant is used to specify that dequeue should read the message without +acquiring any lock on the message (equivalent to a select statement). +""" + +DEQ_LOCKED: int = constants.DEQ_LOCKED +""" +This constant is used to specify that dequeue should read and obtain a write +lock on the message for the duration of the transaction (equivalent to a select +for update statement). +""" + +DEQ_REMOVE: int = constants.DEQ_REMOVE +""" +This constant is used to specify that dequeue should read the message and +update or delete it. This is the default value. +""" + +DEQ_REMOVE_NODATA: int = constants.DEQ_REMOVE_NODATA +""" +This constant is used to specify that dequeue should confirm receipt of the +message but not deliver the actual message content. +""" + + +# AQ dequeue navigation modes +DEQ_FIRST_MSG: int = constants.DEQ_FIRST_MSG +""" +This constant is used to specify that dequeue should retrieve the first +available message that matches the search criteria. This resets the +position to the beginning of the queue. +""" + +DEQ_NEXT_MSG: int = constants.DEQ_NEXT_MSG +""" +This constant is used to specify that dequeue should retrieve the next +available message that matches the search criteria. If the previous message +belongs to a message group, AQ retrieves the next available message that +matches the search criteria and belongs to the message group. This is the +default. +""" + +DEQ_NEXT_TRANSACTION: int = constants.DEQ_NEXT_TRANSACTION +""" +This constant is used to specify that dequeue should skip the remainder of the +transaction group and retrieve the first message of the next transaction group. +This option can only be used if message grouping is enabled for the current +queue. +""" + + +# AQ dequeue visibility modes +DEQ_IMMEDIATE: int = constants.DEQ_IMMEDIATE +""" +This constant is used to specify that dequeue should perform its work as part +of an independent transaction. +""" + +DEQ_ON_COMMIT: int = constants.DEQ_ON_COMMIT +""" +This constant is used to specify that dequeue should be part of the current +transaction. This is the default value. +""" + + +# AQ dequeue wait modes +DEQ_NO_WAIT: int = constants.DEQ_NO_WAIT +""" +This constant is used to specify that dequeue not wait for messages to be +available for dequeuing. +""" + +DEQ_WAIT_FOREVER: int = constants.DEQ_WAIT_FOREVER +""" +This constant is used to specify that dequeue should wait forever for messages +to be available for dequeuing. This is the default value. +""" + + +# AQ enqueue visibility modes +ENQ_IMMEDIATE: int = constants.ENQ_IMMEDIATE +""" +This constant is used to specify that enqueue should perform its work as +part of an independent transaction. + +The use of this constant with bulk enqueuing is only supported in +python-oracledb Thick mode. +""" + +ENQ_ON_COMMIT: int = constants.ENQ_ON_COMMIT +""" +This constant is used to specify that enqueue should be part of the current +transaction. This is the default value. +""" + + +# event types +EVENT_AQ: int = constants.EVENT_AQ +""" +This constant is used to specify that one or more messages are available for +dequeuing on the queue specified when the subscription was created. +""" + +EVENT_DEREG: int = constants.EVENT_DEREG +""" +This constant is used to specify that the subscription has been deregistered +and no further notifications will be sent. +""" + +EVENT_NONE: int = constants.EVENT_NONE +""" +This constant is used to specify no information is available about the event. 
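+
+In a notification callback, the event type can be inspected on the message
+object that is received (a brief sketch; the subscription registration itself
+is not shown)::
+
+    def notification_callback(message):
+        if message.type == oracledb.EVENT_DEREG:
+            print("Subscription has been deregistered")
+        elif message.type == oracledb.EVENT_OBJCHANGE:
+            print("A registered table has changed")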
+""" + +EVENT_OBJCHANGE: int = constants.EVENT_OBJCHANGE +""" +This constant is used to specify that a database change has taken place on a +table registered with the :meth:`Subscription.registerquery()` method. +""" + +EVENT_QUERYCHANGE: int = constants.EVENT_QUERYCHANGE +""" +This constant is used to specify that the result set of a query registered with +the :meth:`Subscription.registerquery()` method has been changed. +""" + +EVENT_SHUTDOWN: int = constants.EVENT_SHUTDOWN +""" +This constant is used to specify that the instance is in the process of being +shut down. +""" + +EVENT_SHUTDOWN_ANY: int = constants.EVENT_SHUTDOWN_ANY +""" +This constant is used to specify that any instance (when running RAC) is in the +process of being shut down. +""" + +EVENT_STARTUP: int = constants.EVENT_STARTUP +""" +This constant is used to specify that the instance is in the process of being +started up. +""" + + +# AQ delivery modes +MSG_BUFFERED: int = constants.MSG_BUFFERED +""" +This constant is used to specify that enqueue or dequeue operations should +enqueue or dequeue buffered messages, respectively. For multi-consumer queues, +a `subscriber `__ with buffered delivery mode +needs to be created prior to enqueuing buffered messages. + +This mode is not supported for bulk array operations in python-oracledb Thick +mode, and for JSON payloads. +""" + +MSG_PERSISTENT: int = constants.MSG_PERSISTENT +""" +This constant is used to specify that enqueue/dequeue operations should enqueue +or dequeue persistent messages. This is the default value. +""" + +MSG_PERSISTENT_OR_BUFFERED: int = constants.MSG_PERSISTENT_OR_BUFFERED +""" +This constant is used to specify that dequeue operations should dequeue either +persistent or buffered messages. +""" + + +# AQ message states +MSG_EXPIRED: int = constants.MSG_EXPIRED +""" +This constant is used to specify that the message has been moved to the +exception queue. +""" + +MSG_PROCESSED: int = constants.MSG_PROCESSED +""" +This constant is used to specify that the message has been processed and has +been retained. +""" + +MSG_READY: int = constants.MSG_READY +""" +This constant is used to specify that the message is ready to be processed. +""" + +MSG_WAITING: int = constants.MSG_WAITING +""" +This constant is used to specify that the message delay has not yet been +reached. +""" + + +# other AQ constants +MSG_NO_DELAY: int = constants.MSG_NO_DELAY +""" +This constant is a possible value for the :attr:`~MessageProperties.delay` +attribute of the message properties object passed as the ``msgproperties`` +parameter to the :meth:`Queue.deqone()` or :meth:`Queue.deqmany()` and +:meth:`Queue.enqone()` or :meth:`Queue.enqmany()` methods. It specifies that +no delay should be imposed and the message should be immediately available for +dequeuing. This is also the default value. +""" + +MSG_NO_EXPIRATION: int = constants.MSG_NO_EXPIRATION +""" +This constant is a possible value for the :attr:`~MessageProperties.expiration` +attribute of the message properties object passed as the ``msgproperties`` +parameter to the :meth:`Queue.deqone()` or :meth:`Queue.deqmany()` and +:meth:`Queue.enqone()` or :meth:`Queue.enqmany()` methods. It specifies that +the message never expires. This is also the default value. +""" + + +# operation codes (CQN) +OPCODE_ALLOPS: int = constants.OPCODE_ALLOPS +""" +This constant is used to specify that messages should be sent for all +operations. 
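+
+A rough sketch of limiting change notification to particular operations
+(python-oracledb Thick mode; ``connection`` and ``notification_callback`` are
+assumed to exist, and the table name is a placeholder)::
+
+    sub = connection.subscribe(
+        callback=notification_callback,
+        operations=oracledb.OPCODE_INSERT | oracledb.OPCODE_UPDATE,
+        qos=oracledb.SUBSCR_QOS_ROWIDS,
+    )
+    sub.registerquery("select * from regions")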
+""" + +OPCODE_ALLROWS: int = constants.OPCODE_ALLROWS +""" +This constant is used to specify that the table or query has been completely +invalidated. +""" + +OPCODE_ALTER: int = constants.OPCODE_ALTER +""" +This constant is used to specify that messages should be sent when a registered +table has been altered in some fashion by DDL, or that the message identifies a +table that has been altered. +""" + +OPCODE_DELETE: int = constants.OPCODE_DELETE +""" +This constant is used to specify that messages should be sent when data is +deleted, or that the message identifies a row that has been deleted. +""" + +OPCODE_DROP: int = constants.OPCODE_DROP +""" +This constant is used to specify that messages should be sent when a registered +table has been dropped, or that the message identifies a table that has been +dropped. +""" + +OPCODE_INSERT: int = constants.OPCODE_INSERT +""" +This constant is used to specify that messages should be sent when data is +inserted, or that the message identifies a row that has been inserted. +""" + +OPCODE_UPDATE: int = constants.OPCODE_UPDATE +""" +This constant is used to specify that messages should be sent when data is +updated, or that the message identifies a row that has been updated. +""" + + +# pipeline operation types +PIPELINE_OP_TYPE_CALL_FUNC: PipelineOpType = PipelineOpType.CALL_FUNC +""" +This constant identifies the type of operation as the calling of a stored +function. +""" + +PIPELINE_OP_TYPE_CALL_PROC: PipelineOpType = PipelineOpType.CALL_PROC +""" +This constant identifies the type of operation as the calling of a stored +procedure. +""" + +PIPELINE_OP_TYPE_COMMIT: PipelineOpType = PipelineOpType.COMMIT +""" +This constant identifies the type of operation as the performing of a commit. +""" + +PIPELINE_OP_TYPE_EXECUTE: PipelineOpType = PipelineOpType.EXECUTE +""" +This constant identifies the type of operation as the executing of a statement. +""" + +PIPELINE_OP_TYPE_EXECUTE_MANY: PipelineOpType = PipelineOpType.EXECUTE_MANY +""" +This constant identifies the type of operations as the executing of a statement +multiple times. +""" + +PIPELINE_OP_TYPE_FETCH_ALL: PipelineOpType = PipelineOpType.FETCH_ALL +""" +This constant identifies the type of operation as the executing of a query and +returning all of the rows from the result set. +""" + +PIPELINE_OP_TYPE_FETCH_MANY: PipelineOpType = PipelineOpType.FETCH_MANY +""" +This constant identifies the type of operation as the executing of a query and +returning up to the specified number of rows from the result set. +""" + +PIPELINE_OP_TYPE_FETCH_ONE: PipelineOpType = PipelineOpType.FETCH_ONE +""" +This constant identifies the type of operation as the executing of a query and +returning the first row of the result set. +""" + + +# connection pool "get" modes +POOL_GETMODE_FORCEGET: PoolGetMode = PoolGetMode.FORCEGET +""" +This constant is used to specify that a new connection should be created and +returned by :meth:`ConnectionPool.acquire()` if there are no free connections +available in the pool and the pool is already at its maximum size. + +When a connection acquired in this mode is eventually released back to the +pool, it will be dropped and not added to the pool if the pool is still at its +maximum size. + +This constant deprecates the ``SPOOL_ATTRVAL_FORCEGET`` constant that was used +in the obsolete cx_Oracle driver. 
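+
+For example (an illustrative sketch; the user, password and DSN are
+placeholders)::
+
+    pool = oracledb.create_pool(
+        user="app_user", password=pw, dsn="dbhost.example.com/orclpdb1",
+        min=1, max=4, increment=1,
+        getmode=oracledb.POOL_GETMODE_FORCEGET,
+    )
+    conn = pool.acquire()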
+""" + +POOL_GETMODE_NOWAIT: PoolGetMode = PoolGetMode.NOWAIT +""" +This constant is used to specify that an exception should be raised by +:meth:`ConnectionPool.acquire()` when all currently created connections are +already in use and so :meth:`~ConnectionPool.acquire()` cannot immediately +return a connection. Note the exception may occur even if the pool is smaller +than its maximum size. + +This constant deprecates the ``SPOOL_ATTRVAL_NOWAIT`` constant that was used in +the obsolete cx_Oracle driver, and was the default ``getmode`` value. +""" + +POOL_GETMODE_TIMEDWAIT: PoolGetMode = PoolGetMode.TIMEDWAIT +""" +This constant is used to specify that :meth:`ConnectionPool.acquire()` should +wait for a period of time (defined by the ``wait_timeout`` parameter) for a +connection to become available before returning with an error. + +This constant deprecates the ``SPOOL_ATTRVAL_TIMEDWAIT`` constant that was used +in the obsolete cx_Oracle driver. +""" + +POOL_GETMODE_WAIT: PoolGetMode = PoolGetMode.WAIT +""" +This constant is used to specify that :meth:`ConnectionPool.acquire()` should +wait until a connection is available if there are currently no free connections +available in the pool. This is the default value. + +This constant deprecates the ``SPOOL_ATTRVAL_WAIT`` constant that was used in +the obsolete cx_Oracle driver. +""" + + +# connection pool purity +PURITY_DEFAULT: Purity = Purity.DEFAULT +""" +This constant is used to specify that the purity of the session is the default +value identified by Oracle (see Oracle's documentation for more information). +This is the default value. + +This constant deprecates the ``ATTR_PURITY_DEFAULT`` constant that was used in +the obsolete cx_Oracle driver, and was the default ``purity`` value. +""" + +PURITY_NEW: Purity = Purity.NEW +""" +This constant is used to specify that the session acquired from the pool should +be new and not have any prior session state. + +This constant deprecates the ``ATTR_PURITY_NEW`` constant that was used in the +obsolete cx_Oracle driver. +""" + +PURITY_SELF: Purity = Purity.SELF +""" +This constant is used to specify that the session acquired from the pool need +not be new and may have prior session state. + +This constant deprecates the ``ATTR_PURITY_SELF`` constant that was used in the +obsolete cx_Oracle driver. +""" + + +# subscription grouping classes +SUBSCR_GROUPING_CLASS_NONE: int = constants.SUBSCR_GROUPING_CLASS_NONE +""" +This constant is used to specify that no grouping should take place. +""" + +SUBSCR_GROUPING_CLASS_TIME: int = constants.SUBSCR_GROUPING_CLASS_TIME +""" +This constant is used to specify that events are to be grouped by the period of +time in which they are received. +""" + + +# subscription grouping types +SUBSCR_GROUPING_TYPE_SUMMARY: int = constants.SUBSCR_GROUPING_TYPE_SUMMARY +""" +This constant is used to specify that when events are grouped a summary of the +events should be sent instead of the individual events. This is the default +value. +""" + +SUBSCR_GROUPING_TYPE_LAST: int = constants.SUBSCR_GROUPING_TYPE_LAST +""" +This constant is used to specify that when events are grouped the last event +that makes up the group should be sent instead of the individual events. +""" + + +# subscription namespaces +SUBSCR_NAMESPACE_AQ: int = constants.SUBSCR_NAMESPACE_AQ +""" +This constant is used to specify that notifications should be sent when a queue +has messages available to dequeue. 
+""" + +SUBSCR_NAMESPACE_DBCHANGE: int = constants.SUBSCR_NAMESPACE_DBCHANGE +""" +This constant is used to specify that database change notification or query +change notification messages are to be sent. This is the default value. +""" + + +# subscription protocols +SUBSCR_PROTO_CALLBACK: int = constants.SUBSCR_PROTO_CALLBACK +""" +This constant is used to specify that notifications will be sent to the +callback routine identified when the subscription was created. It is the +default value and the only value currently supported. +""" + +SUBSCR_PROTO_HTTP: int = constants.SUBSCR_PROTO_HTTP +""" +This constant is used to specify that notifications will be sent to an HTTP +URL when a message is generated. This value is currently not supported. +""" + +SUBSCR_PROTO_MAIL: int = constants.SUBSCR_PROTO_MAIL +""" +This constant is used to specify that notifications will be sent to an e-mail +address when a message is generated. This value is currently not supported. +""" + +SUBSCR_PROTO_SERVER: int = constants.SUBSCR_PROTO_SERVER +""" +This constant is used to specify that notifications will be sent to a PL/SQL +procedure when a message is generated. This value is currently not supported. +""" + + +# subscription quality of service +SUBSCR_QOS_BEST_EFFORT: int = constants.SUBSCR_QOS_BEST_EFFORT +""" +This constant is used to specify that best effort filtering for query result +set changes is acceptable. False positive notifications may be received. This +behaviour may be suitable for caching applications. +""" + +SUBSCR_QOS_DEFAULT: int = constants.SUBSCR_QOS_DEFAULT +""" +This constant is used to specify that the default behavior for subscriptions +should be used. +""" + +SUBSCR_QOS_DEREG_NFY: int = constants.SUBSCR_QOS_DEREG_NFY +""" +This constant is used to specify that the subscription should be automatically +unregistered after the first notification is received. +""" + +SUBSCR_QOS_QUERY: int = constants.SUBSCR_QOS_QUERY +""" +This constant is used to specify that notifications should be sent if the +result set of the registered query changes. By default, no false positive +notifications will be generated. +""" + +SUBSCR_QOS_RELIABLE: int = constants.SUBSCR_QOS_RELIABLE +""" +This constant is used to specify that notifications should not be lost in the +event of database failure. +""" + +SUBSCR_QOS_ROWIDS: int = constants.SUBSCR_QOS_ROWIDS +""" +This constant is used to specify that the rowids of the inserted, updated or +deleted rows should be included in the message objects that are sent. +""" + + +# flags for tpc_begin() +TPC_BEGIN_JOIN: int = base_impl.TPC_TXN_FLAGS_JOIN +""" +This constant is used to join an existing TPC transaction. +""" + +TPC_BEGIN_NEW: int = base_impl.TPC_TXN_FLAGS_NEW +""" +This constant is used to create a new TPC transaction. +""" + +TPC_BEGIN_PROMOTE: int = base_impl.TPC_TXN_FLAGS_PROMOTE +""" +This constant is used to promote a local transaction to a TPC transaction. +""" + +TPC_BEGIN_RESUME: int = base_impl.TPC_TXN_FLAGS_RESUME +""" +This constant is used to resume an existing TPC transaction. +""" + + +# flags for tpc_end() +TPC_END_NORMAL: int = constants.TPC_END_NORMAL +""" +This constant is used to end TPC transaction participation normally. +""" + +TPC_END_SUSPEND: int = constants.TPC_END_SUSPEND +""" +This constant is used to suspend a TPC transaction. +""" + + +# vector formats +VECTOR_FORMAT_BINARY: VectorFormat = VectorFormat.BINARY +""" +This constant is used to represent the storage format of VECTOR columns using +8-bit unsigned integers. 
+""" + +VECTOR_FORMAT_FLOAT32: VectorFormat = VectorFormat.FLOAT32 +""" +This constant is used to represent the storage format of VECTOR columns using +32-bit floating point numbers. +""" + +VECTOR_FORMAT_FLOAT64: VectorFormat = VectorFormat.FLOAT64 +""" +This constant is used to represent the storage format of VECTOR columns using +64-bit floating point numbers. +""" + +VECTOR_FORMAT_INT8: VectorFormat = VectorFormat.INT8 +""" +This constant is used to represent the storage format of VECTOR columns using +8-bit signed integers. +""" + + +from .connection import ( # noqa: E402 + AsyncConnection as AsyncConnection, + connect as connect, + connect_async as connect_async, + Connection as Connection, +) + +from .cursor import ( # noqa: E402 + AsyncCursor as AsyncCursor, + Cursor as Cursor, +) + +from .pool import ( # noqa: E402 + AsyncConnectionPool as AsyncConnectionPool, + ConnectionPool as ConnectionPool, + create_pool as create_pool, + create_pool_async as create_pool_async, + get_pool as get_pool, +) + +from .subscr import ( # noqa: E402 + Subscription as Subscription, + Message as Message, + MessageQuery as MessageQuery, + MessageRow as MessageRow, + MessageTable as MessageTable, +) + +from .aq import ( # noqa: E402 + Queue as Queue, + AsyncQueue as AsyncQueue, + DeqOptions as DeqOptions, + EnqOptions as EnqOptions, + MessageProperties as MessageProperties, +) + +from .connect_params import ConnectParams as ConnectParams # noqa: E402 + +from .pool_params import PoolParams as PoolParams # noqa: E402 + +from . import builtin_hooks # noqa: E402 + +IntervalYM = collections.namedtuple("IntervalYM", ["years", "months"]) + + +class JsonId(bytes): + pass + + +# initialize implementations +package = sys.modules[__name__] +base_impl.init_base_impl(package) +thick_impl.init_thick_impl(package) +thin_impl.init_thin_impl(package) +del package + +# remove unnecessary symbols +del ( + aq, # noqa + base_impl, # noqa + builtin_hooks, # noqa + connect_params, # noqa + connection, # noqa + constants, # noqa + constructors, # noqa + cursor, # noqa + dbobject, # noqa + driver_mode, # noqa + dsn, # noqa + errors, # noqa + exceptions, # noqa + fetch_info, # noqa + future, # noqa + lob, # noqa + pipeline, # noqa + pool, # noqa + pool_params, # noqa + sparse_vector, # noqa + soda, # noqa + subscr, # noqa + sys, # noqa + thick_impl, # noqa + thin_impl, # noqa + utils, # noqa + var, # noqa + warnings, # noqa +) + +# general aliases (for backwards compatibility) +ObjectType = DbObjectType +Object = DbObject +SessionPool = ConnectionPool +version = __version__ + +# aliases for database types (for backwards compatibility) +BFILE = DB_TYPE_BFILE +BLOB = DB_TYPE_BLOB +BOOLEAN = DB_TYPE_BOOLEAN +CLOB = DB_TYPE_CLOB +CURSOR = DB_TYPE_CURSOR +FIXED_CHAR = DB_TYPE_CHAR +FIXED_NCHAR = DB_TYPE_NCHAR +INTERVAL = DB_TYPE_INTERVAL_DS +LONG_BINARY = DB_TYPE_LONG_RAW +LONG_STRING = DB_TYPE_LONG +NATIVE_INT = DB_TYPE_BINARY_INTEGER +NATIVE_FLOAT = DB_TYPE_BINARY_DOUBLE +NCHAR = DB_TYPE_NVARCHAR +OBJECT = DB_TYPE_OBJECT +NCLOB = DB_TYPE_NCLOB +TIMESTAMP = DB_TYPE_TIMESTAMP + +# aliases for authentication modes (for backwards compatibility) +DEFAULT_AUTH = AUTH_MODE_DEFAULT +SYSASM = AUTH_MODE_SYSASM +SYSBKP = AUTH_MODE_SYSBKP +SYSDBA = AUTH_MODE_SYSDBA +SYSDGD = AUTH_MODE_SYSDGD +SYSKMT = AUTH_MODE_SYSKMT +SYSOPER = AUTH_MODE_SYSOPER +SYSRAC = AUTH_MODE_SYSRAC +PRELIM_AUTH = AUTH_MODE_PRELIM + +# aliases for pool "get" modes (for backwards compatibility) +SPOOL_ATTRVAL_WAIT = POOL_GETMODE_WAIT +SPOOL_ATTRVAL_NOWAIT = 
POOL_GETMODE_NOWAIT +SPOOL_ATTRVAL_FORCEGET = POOL_GETMODE_FORCEGET +SPOOL_ATTRVAL_TIMEDWAIT = POOL_GETMODE_TIMEDWAIT + +# aliases for purity (for backwards compatibility) +ATTR_PURITY_DEFAULT = PURITY_DEFAULT +ATTR_PURITY_NEW = PURITY_NEW +ATTR_PURITY_SELF = PURITY_SELF + +# aliases for subscription protocols (for backwards compatibility) +SUBSCR_PROTO_OCI = SUBSCR_PROTO_CALLBACK diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..7832b5b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/aq.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/aq.cpython-39.pyc new file mode 100644 index 0000000..0d0e46e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/aq.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/arrow_array.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/arrow_array.cpython-39.pyc new file mode 100644 index 0000000..55a3ce4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/arrow_array.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..b0036b5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/builtin_hooks.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/builtin_hooks.cpython-39.pyc new file mode 100644 index 0000000..d588ade Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/builtin_hooks.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/connect_params.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/connect_params.cpython-39.pyc new file mode 100644 index 0000000..ddb7e8a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/connect_params.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/connection.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/connection.cpython-39.pyc new file mode 100644 index 0000000..febc422 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/connection.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/constants.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/constants.cpython-39.pyc new file mode 100644 index 0000000..ae3d916 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/constants.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/constructors.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/constructors.cpython-39.pyc new file mode 100644 index 0000000..6237e99 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/constructors.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/cursor.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/cursor.cpython-39.pyc new file mode 100644 index 0000000..2e1a645 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/cursor.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dataframe.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dataframe.cpython-39.pyc new file mode 100644 index 0000000..fc833f7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dataframe.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dbobject.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dbobject.cpython-39.pyc new file mode 100644 index 0000000..2259221 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dbobject.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/defaults.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/defaults.cpython-39.pyc new file mode 100644 index 0000000..45408cc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/defaults.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/driver_mode.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/driver_mode.cpython-39.pyc new file mode 100644 index 0000000..7024e44 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/driver_mode.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dsn.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dsn.cpython-39.pyc new file mode 100644 index 0000000..ba23594 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/dsn.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/enums.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/enums.cpython-39.pyc new file mode 100644 index 0000000..87f6bbc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/enums.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/errors.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/errors.cpython-39.pyc new file mode 100644 index 0000000..22b4db5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/errors.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/exceptions.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..d966a36 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/exceptions.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/fetch_info.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/fetch_info.cpython-39.pyc new file mode 100644 index 0000000..62326b0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/fetch_info.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/future.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/future.cpython-39.pyc new file mode 100644 index 0000000..5fecc90 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/future.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/lob.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/lob.cpython-39.pyc new file mode 100644 index 0000000..360aac5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/lob.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pipeline.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pipeline.cpython-39.pyc new file mode 100644 index 0000000..2032116 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pipeline.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pool.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pool.cpython-39.pyc new file mode 100644 index 0000000..bec86f4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pool.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pool_params.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pool_params.cpython-39.pyc new file mode 100644 index 0000000..cfc19f0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/pool_params.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/soda.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/soda.cpython-39.pyc new file mode 100644 index 0000000..2c129aa Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/soda.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/sparse_vector.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/sparse_vector.cpython-39.pyc new file mode 100644 index 0000000..da0f04e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/sparse_vector.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/subscr.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/subscr.cpython-39.pyc new file mode 100644 index 0000000..12d2294 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/subscr.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/utils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..4c58815 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/utils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/var.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/var.cpython-39.pyc new file mode 100644 index 0000000..eed4ee4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/var.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/__pycache__/version.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/version.cpython-39.pyc new file mode 100644 index 0000000..e2a4c99 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/__pycache__/version.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/aq.py b/.venv/lib/python3.9/site-packages/oracledb/aq.py new file mode 100644 index 0000000..ebca3ec --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/aq.py @@ -0,0 +1,659 @@ +# 
----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# aq.py +# +# Contains the classes used for handling Advanced Queuing (AQ): Queue, +# DeqOptions, EnqOptions and MessageProperties. +# ----------------------------------------------------------------------------- + +import datetime + +from . import connection as connection_module +from typing import Any, Union +from . import errors +from .base import BaseMetaClass +from .dbobject import DbObject, DbObjectType + + +class BaseQueue(metaclass=BaseMetaClass): + @classmethod + def _from_impl(cls, connection, impl): + queue = cls.__new__(cls) + queue._connection = connection + queue._deq_options = DeqOptions._from_impl(impl.deq_options_impl) + queue._enq_options = EnqOptions._from_impl(impl.enq_options_impl) + queue._payload_type = None + queue._impl = impl + return queue + + def _verify_message(self, message: "MessageProperties") -> None: + """ + Internal method used for verifying a message. + """ + if not isinstance(message, MessageProperties): + raise TypeError("expecting MessageProperties object") + if message.payload is None: + errors._raise_err(errors.ERR_MESSAGE_HAS_NO_PAYLOAD) + if isinstance(self.payload_type, DbObjectType): + if ( + not isinstance(message.payload, DbObject) + or message.payload.type != self.payload_type + ): + errors._raise_err(errors.ERR_PAYLOAD_CANNOT_BE_ENQUEUED) + elif self.payload_type == "JSON": + if not isinstance(message.payload, (dict, list)): + errors._raise_err(errors.ERR_PAYLOAD_CANNOT_BE_ENQUEUED) + else: + if not isinstance(message.payload, (str, bytes)): + errors._raise_err(errors.ERR_PAYLOAD_CANNOT_BE_ENQUEUED) + + @property + def connection(self) -> "connection_module.Connection": + """ + This read-only attribute returns a reference to the connection object + on which the queue was created. + """ + return self._connection + + @property + def deqoptions(self) -> "DeqOptions": + """ + This read-only attribute returns a reference to the options that will + be used when dequeuing messages from the queue. + """ + return self._deq_options + + @property + def deqOptions(self) -> "DeqOptions": + """ + Deprecated: use deqoptions instead. + """ + return self.deqoptions + + @property + def enqoptions(self) -> "EnqOptions": + """ + This read-only attribute returns a reference to the options that will + be used when enqueuing messages into the queue. 
+ """ + return self._enq_options + + @property + def enqOptions(self) -> "EnqOptions": + """ + Deprecated: use enqoptions() instead. + """ + return self.enqoptions + + @property + def name(self) -> str: + """ + This read-only attribute returns the name of the queue. + """ + return self._impl.name + + @property + def payload_type(self) -> Union[DbObjectType, None]: + """ + This read-only attribute returns the object type for payloads that can + be enqueued and dequeued. If using a JSON queue, this returns the value + "JSON". If using a raw queue, this returns the value *None*. + """ + if self._payload_type is None: + if self._impl.is_json: + self._payload_type = "JSON" + elif self._impl.payload_type is not None: + self._payload_type = DbObjectType._from_impl( + self._impl.payload_type + ) + return self._payload_type + + @property + def payloadType(self) -> Union[DbObjectType, None]: + """ + Deprecated: use payload_type instead. + """ + return self.payload_type + + +class Queue(BaseQueue): + + def deqmany(self, max_num_messages: int) -> list["MessageProperties"]: + """ + Dequeues up to the specified number of messages from the queue and + returns a list of these messages. + """ + if self._impl._supports_deq_many(self._connection._impl): + message_impls = self._impl.deq_many(max_num_messages) + else: + message_impls = [] + while len(message_impls) < max_num_messages: + message_impl = self._impl.deq_one() + if message_impl is None: + break + message_impls.append(message_impl) + return [MessageProperties._from_impl(impl) for impl in message_impls] + + def deqMany(self, max_num_messages: int) -> list["MessageProperties"]: + """ + Deprecated: use deqmany() instead. + """ + return self.deqmany(max_num_messages) + + def deqone(self) -> Union["MessageProperties", None]: + """ + Dequeues at most one message from the queue and returns it. If no + message is dequeued, None is returned. + """ + message_impl = self._impl.deq_one() + if message_impl is not None: + return MessageProperties._from_impl(message_impl) + + def deqOne(self) -> Union["MessageProperties", None]: + """ + Deprecated: use deqone() instead. + """ + return self.deqone() + + def enqmany(self, messages: list["MessageProperties"]) -> None: + """ + Enqueues multiple messages into the queue. The messages parameter must + be a sequence containing message property objects which have all had + their payload attribute set to a value that the queue supports. + + Warning: In python-oracledb Thick mode using Oracle Client libraries + prior to 21c, calling :meth:`Queue.enqmany()` in parallel on different + connections acquired from the same connection pool may fail due to + Oracle bug 29928074. To avoid this, do one of: upgrade the client + libraries, ensure that :meth:`Queue.enqmany()` is not run in parallel, + use standalone connections or connections from different pools, or make + multiple calls to :meth:`Queue.enqone()`. The function + :meth:`Queue.deqmany()` call is not affected. + """ + for message in messages: + self._verify_message(message) + message_impls = [m._impl for m in messages] + self._impl.enq_many(message_impls) + + def enqMany(self, messages: list["MessageProperties"]) -> None: + """ + Deprecated: use enqmany() instead. + """ + return self.enqmany(messages) + + def enqone(self, message: "MessageProperties") -> None: + """ + Enqueues a single message into the queue. The message must be a message + property object which has had its payload attribute set to a value that + the queue supports. 
+ """ + self._verify_message(message) + self._impl.enq_one(message._impl) + + def enqOne(self, message: "MessageProperties") -> None: + """ + Deprecated: use enqone() instead. + """ + return self.enqone(message) + + +class AsyncQueue(BaseQueue): + + async def deqmany( + self, max_num_messages: int + ) -> list["MessageProperties"]: + """ + Dequeues up to the specified number of messages from the queue and + returns a list of these messages. + """ + message_impls = await self._impl.deq_many(max_num_messages) + return [MessageProperties._from_impl(impl) for impl in message_impls] + + async def deqone(self) -> Union["MessageProperties", None]: + """ + Dequeues at most one message from the queue and returns it. If no + message is dequeued, None is returned. + """ + message_impl = await self._impl.deq_one() + if message_impl is not None: + return MessageProperties._from_impl(message_impl) + + async def enqmany(self, messages: list["MessageProperties"]) -> None: + """ + Enqueues multiple messages into the queue. The messages parameter must + be a sequence containing message property objects which have all had + their payload attribute set to a value that the queue supports. + + Warning: calling this function in parallel on different connections + acquired from the same pool may fail due to Oracle bug 29928074. Ensure + that this function is not run in parallel, use standalone connections + or connections from different pools, or make multiple calls to + enqone() instead. The function Queue.deqmany() call is not affected. + """ + for message in messages: + self._verify_message(message) + message_impls = [m._impl for m in messages] + await self._impl.enq_many(message_impls) + + async def enqone(self, message: "MessageProperties") -> None: + """ + Enqueues a single message into the queue. The message must be a message + property object which has had its payload attribute set to a value that + the queue supports. + """ + self._verify_message(message) + await self._impl.enq_one(message._impl) + + +class DeqOptions(metaclass=BaseMetaClass): + @classmethod + def _from_impl(cls, impl): + options = cls.__new__(cls) + options._impl = impl + return options + + @property + def condition(self) -> str: + """ + This read-write attribute specifies a boolean expression similar to the + where clause of a SQL query. The boolean expression can include + conditions on message properties, user data properties, and PL/SQL or + SQL functions. The default is to have no condition specified. + """ + return self._impl.get_condition() + + @condition.setter + def condition(self, value: str) -> None: + self._impl.set_condition(value) + + @property + def consumername(self) -> str: + """ + This read-write attribute specifies the name of the consumer. Only + messages matching the consumer name will be accessed. If the queue is + not set up for multiple consumers this attribute should not be set. The + default is to have no consumer name specified. + """ + return self._impl.get_consumer_name() + + @consumername.setter + def consumername(self, value: str) -> None: + self._impl.set_consumer_name(value) + + @property + def correlation(self) -> str: + """ + This read-write attribute specifies the correlation identifier of the + message to be dequeued. Special pattern-matching characters, such as + the percent sign (%) and the underscore (_), can be used. If multiple + messages satisfy the pattern, the order of dequeuing is indeterminate. + The default is to have no correlation specified. 
+ """ + return self._impl.get_correlation() + + @correlation.setter + def correlation(self, value: str) -> None: + self._impl.set_correlation(value) + + @property + def deliverymode(self) -> int: + """ + This write-only attribute specifies what types of messages should be + dequeued. It should be one of the values + :data:`~oracledb.MSG_PERSISTENT` (default), + :data:`~oracledb.MSG_BUFFERED`, or + :data:`~oracledb.MSG_PERSISTENT_OR_BUFFERED`. + + Note that :data:`~oracledb.MSG_BUFFERED` is not supported for JSON + payloads. + """ + raise AttributeError("deliverymode can only be written") + + @deliverymode.setter + def deliverymode(self, value: int) -> None: + self._impl.set_delivery_mode(value) + + @property + def mode(self) -> int: + """ + This read-write attribute specifies the locking behaviour associated + with the dequeue operation. It should be one of the values + :data:`~oracledb.DEQ_BROWSE`, :data:`~oracledb.DEQ_LOCKED`, + :data:`~oracledb.DEQ_REMOVE` (default), or + :data:`~oracledb.DEQ_REMOVE_NODATA`. + """ + return self._impl.get_mode() + + @mode.setter + def mode(self, value: int) -> None: + self._impl.set_mode(value) + + @property + def msgid(self) -> bytes: + """ + This read-write attribute specifies the identifier of the message to + be dequeued. The default is to have no message identifier specified. + """ + return self._impl.get_message_id() + + @msgid.setter + def msgid(self, value: bytes) -> None: + self._impl.set_message_id(value) + + @property + def navigation(self) -> int: + """ + This read-write attribute specifies the position of the message that is + retrieved. It should be one of the values + :data:`~oracledb.DEQ_FIRST_MSG`, :data:`~oracledb.DEQ_NEXT_MSG` + (default), or :data:`~oracledb.DEQ_NEXT_TRANSACTION`. + """ + return self._impl.get_navigation() + + @navigation.setter + def navigation(self, value: int) -> None: + self._impl.set_navigation(value) + + @property + def transformation(self) -> str: + """ + This read-write attribute specifies the name of the transformation that + must be applied after the message is dequeued from the database but + before it is returned to the calling application. The transformation + must be created using dbms_transform. The default is to have no + transformation specified. + """ + return self._impl.get_transformation() + + @transformation.setter + def transformation(self, value: str) -> None: + self._impl.set_transformation(value) + + @property + def visibility(self) -> int: + """ + This read-write attribute specifies the transactional behavior of the + dequeue request. It should be one of the values + :data:`~oracledb.DEQ_ON_COMMIT` (default) or + :data:`~oracledb.DEQ_IMMEDIATE`. This attribute is ignored when using + the :data:`~oracledb.DEQ_BROWSE` mode. Note the value of + :attr:`~Connection.autocommit` is always ignored. + """ + return self._impl.get_visibility() + + @visibility.setter + def visibility(self, value: int) -> None: + self._impl.set_visibility(value) + + @property + def wait(self) -> int: + """ + This read-write attribute specifies the time to wait, in seconds, for a + message matching the search criteria to become available for dequeuing. + One of the values :data:`~oracledb.DEQ_NO_WAIT` or + :data:`~oracledb.DEQ_WAIT_FOREVER` can also be used. The default is + :data:`~oracledb.DEQ_WAIT_FOREVER`. 
+ """ + return self._impl.get_wait() + + @wait.setter + def wait(self, value: int) -> None: + self._impl.set_wait(value) + + +class EnqOptions(metaclass=BaseMetaClass): + @classmethod + def _from_impl(cls, impl): + options = cls.__new__(cls) + options._impl = impl + return options + + @property + def deliverymode(self) -> int: + """ + This write-only attribute specifies what type of messages should be + enqueued. It should be one of the values + :data:`~oracledb.MSG_PERSISTENT` (default) or + :data:`~oracledb.MSG_BUFFERED`. + + Note that :data:`~oracledb.MSG_BUFFERED` is not supported for JSON + payloads. + """ + raise AttributeError("deliverymode can only be written") + + @deliverymode.setter + def deliverymode(self, value: int) -> None: + self._impl.set_delivery_mode(value) + + @property + def transformation(self) -> str: + """ + This read-write attribute specifies the name of the transformation that + must be applied before the message is enqueued into the database. The + transformation must be created using dbms_transform. The default is to + have no transformation specified. + """ + return self._impl.get_transformation() + + @transformation.setter + def transformation(self, value: str) -> None: + self._impl.set_transformation(value) + + @property + def visibility(self) -> int: + """ + This read-write attribute specifies the transactional behavior of the + enqueue request. It should be one of the values + :data:`~oracledb.ENQ_ON_COMMIT` (default) or + :data:`~oracledb.ENQ_IMMEDIATE`. Note the value of + :attr:`~Connection.autocommit` is ignored. + """ + return self._impl.get_visibility() + + @visibility.setter + def visibility(self, value: int) -> None: + self._impl.set_visibility(value) + + +class MessageProperties(metaclass=BaseMetaClass): + _recipients = [] + + @classmethod + def _from_impl(cls, impl): + props = cls.__new__(cls) + props._impl = impl + return props + + @property + def attempts(self) -> int: + """ + This read-only attribute specifies the number of attempts that have + been made to dequeue the message. + """ + return self._impl.get_num_attempts() + + @property + def correlation(self) -> str: + """ + This read-write attribute specifies the correlation used when the + message was enqueued. + """ + return self._impl.get_correlation() + + @correlation.setter + def correlation(self, value: str) -> None: + self._impl.set_correlation(value) + + @property + def delay(self) -> int: + """ + This read-write attribute specifies the number of seconds to delay an + enqueued message. Any integer is acceptable but the constant + :data:`~oracledb.MSG_NO_DELAY` can also be used indicating that the + message is available for immediate dequeuing. + """ + return self._impl.get_delay() + + @delay.setter + def delay(self, value: int) -> None: + self._impl.set_delay(value) + + @property + def deliverymode(self) -> int: + """ + This read-only attribute specifies the type of message that was + dequeued. It will be one of the values + :data:`~oracledb.MSG_PERSISTENT` or + :data:`~oracledb.MSG_BUFFERED`. + """ + return self._impl.get_delivery_mode() + + @property + def enqtime(self) -> datetime.datetime: + """ + This read-only attribute specifies the time that the message was + enqueued. + """ + return self._impl.get_enq_time() + + @property + def exceptionq(self) -> str: + """ + This read-write attribute specifies the name of the queue to which the + message is moved if it cannot be processed successfully. 
Messages are + moved if the number of unsuccessful dequeue attempts has exceeded the + maximum number of retries or if the message has expired. All messages + in the exception queue are in the :data:`~oracledb.MSG_EXPIRED` state. + The default value is the name of the exception queue associated with + the queue table. + """ + return self._impl.get_exception_queue() + + @exceptionq.setter + def exceptionq(self, value: str) -> None: + self._impl.set_exception_queue(value) + + @property + def expiration(self) -> int: + """ + This read-write attribute specifies, in seconds, how long the message + is available for dequeuing. This attribute is an offset from the delay + attribute. Expiration processing requires the queue monitor to be + running. Any integer is accepted but the constant + :data:`~oracledb.MSG_NO_EXPIRATION` can also be used indicating that + the message never expires. + """ + return self._impl.get_expiration() + + @expiration.setter + def expiration(self, value: int) -> None: + self._impl.set_expiration(value) + + @property + def msgid(self) -> bytes: + """ + This read-only attribute specifies the id of the message in the last + queue that enqueued or dequeued this message. If the message has never + been dequeued or enqueued, the value will be `None`. + """ + return self._impl.get_message_id() + + @property + def payload(self) -> Union[bytes, DbObject]: + """ + This read-write attribute specifies the payload that will be enqueued + or the payload that was dequeued when using a queue. When enqueuing, + the value is checked to ensure that it conforms to the type expected + by that queue. For RAW queues, the value can be a bytes object or a + string. If the value is a string it will be converted to bytes in the + encoding UTF-8. + """ + return self._impl.payload + + @payload.setter + def payload(self, value: Any) -> None: + if isinstance(value, DbObject): + self._impl.set_payload_object(value._impl) + elif not isinstance(value, (str, bytes)): + self._impl.set_payload_json(value) + else: + if isinstance(value, str): + value_bytes = value.encode() + elif isinstance(value, bytes): + value_bytes = value + self._impl.set_payload_bytes(value_bytes) + self._impl.payload = value + + @property + def priority(self) -> int: + """ + This read-write attribute specifies the priority of the message. A + smaller number indicates a higher priority. The priority can be any + integer, including negative numbers. The default value is 0. + """ + return self._impl.get_priority() + + @priority.setter + def priority(self, value: int) -> None: + self._impl.set_priority(value) + + @property + def recipients(self) -> list[str]: + """ + This read-write attribute specifies a list of recipient names that can + be associated with a message at the time a message is enqueued. This + allows a limited set of recipients to dequeue each message. The + recipient list associated with the message overrides the queue + subscriber list, if there is one. The recipient names need not be in + the subscriber list but can be, if desired. + + To dequeue a message, the consumername attribute can be set to one of + the recipient names. The original message recipient list is not + available on dequeued messages. All recipients have to dequeue a + message before it gets removed from the queue. + + Subscribing to a queue is like subscribing to a magazine: each + subscriber can dequeue all the messages placed into a specific queue, + just as each magazine subscriber has access to all its articles. 
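A minimal sketch of the priority, expiration, and recipients attributes described above; all names are placeholders and a multi-consumer queue is assumed:

    queue = conn.queue("DEMO_MULTI_QUEUE")

    props = conn.msgproperties(payload=b"for billing only")
    props.priority = 1                    # smaller numbers are higher priority
    props.expiration = 3600               # seconds the message remains dequeuable
    props.recipients = ["BILLING_APP"]    # overrides the queue subscriber list
    queue.enqone(props)
    conn.commit()

    # Only the named recipient can dequeue the message.
    queue.deqoptions.consumername = "BILLING_APP"
    reply = queue.deqone()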
Being + a recipient, however, is like getting a letter: each recipient is a + designated target of a particular message. + """ + return self._recipients + + @recipients.setter + def recipients(self, value: list) -> None: + self._impl.set_recipients(value) + self._recipients = value + + @property + def state(self) -> int: + """ + This read-only attribute specifies the state of the message at the time + of the dequeue. It will be one of the values + :data:`~oracledb.MSG_WAITING`, :data:`~oracledb.MSG_READY`, + :data:`~oracledb.MSG_PROCESSED`, or :data:`~oracledb.MSG_EXPIRED`. + """ + return self._impl.get_state() diff --git a/.venv/lib/python3.9/site-packages/oracledb/arrow_array.py b/.venv/lib/python3.9/site-packages/oracledb/arrow_array.py new file mode 100644 index 0000000..5ecb1c8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/arrow_array.py @@ -0,0 +1,112 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# arrow_array.py +# +# Implement an ArrowArray that is used for efficiently transferring Arrow +# array data to other data frame libraries. +# ----------------------------------------------------------------------------- + +from .arrow_impl import ArrowArrayImpl +from .base import BaseMetaClass + +from . import errors + + +class ArrowArray(metaclass=BaseMetaClass): + _impl = None + + def __init__(self): + errors._raise_err(errors.ERR_INTERNAL_CREATION_REQUIRED) + + def __len__(self): + return self.num_rows + + def __repr__(self): + return ( + f"ArrowArray(name={self.name}, " + f"len={self.num_rows}, " + f"type={self.dtype})" + ) + + def __str__(self): + return self.__repr__() + + @classmethod + def _from_arrow(cls, obj): + array = cls.__new__(cls) + array._impl = ArrowArrayImpl.from_arrow_array(obj) + return array + + @classmethod + def _from_impl(cls, impl): + array = cls.__new__(cls) + array._impl = impl + return array + + def __arrow_c_array__(self, requested_schema=None): + """ + Returns a tuple containing an ArrowSchema and ArrowArray PyCapsules. + """ + if requested_schema is not None: + raise NotImplementedError("requested_schema") + return ( + self._impl.get_schema_capsule(), + self._impl.get_array_capsule(), + ) + + def __arrow_c_schema__(self): + """ + Returns an ArrowSchema PyCapsule. 
+ """ + return self._impl.get_schema_capsule() + + @property + def dtype(self) -> str: + """ + Returns the data type associated with the array. + """ + return self._impl.get_data_type() + + @property + def name(self) -> str: + """ + Returns the name associated with the array. + """ + return self._impl.get_name() + + @property + def null_count(self) -> int: + """ + Returns the number of rows that contain null values. + """ + return self._impl.get_null_count() + + @property + def num_rows(self) -> int: + """ + Returns the number of rows in the array. + """ + return self._impl.get_num_rows() diff --git a/.venv/lib/python3.9/site-packages/oracledb/arrow_impl.cpython-39-x86_64-linux-gnu.so b/.venv/lib/python3.9/site-packages/oracledb/arrow_impl.cpython-39-x86_64-linux-gnu.so new file mode 100755 index 0000000..5042024 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/arrow_impl.cpython-39-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/base.py b/.venv/lib/python3.9/site-packages/oracledb/base.py new file mode 100644 index 0000000..251414c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/base.py @@ -0,0 +1,48 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# base.py +# +# Contains base classes and methods that have no internal dependencies. +# ----------------------------------------------------------------------------- + +from . 
import __name__ as MODULE_NAME + + +# metaclass used by all oracledb classes; currently this only ensures that when +# the class is displayed it only shows the overall module name instead of any +# subpackage names +class BaseMetaClass(type): + + def __new__(cls, name, bases, attrs): + module_name = attrs["__module__"] + qual_name = attrs["__qualname__"] + if module_name.startswith(MODULE_NAME): + module_name = MODULE_NAME + attrs["_public_name"] = f"{module_name}.{qual_name}" + return super().__new__(cls, name, bases, attrs) + + def __repr__(cls): + return f"" diff --git a/.venv/lib/python3.9/site-packages/oracledb/base_impl.cpython-39-x86_64-linux-gnu.so b/.venv/lib/python3.9/site-packages/oracledb/base_impl.cpython-39-x86_64-linux-gnu.so new file mode 100755 index 0000000..b536a6d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/base_impl.cpython-39-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/builtin_hooks.py b/.venv/lib/python3.9/site-packages/oracledb/builtin_hooks.py new file mode 100644 index 0000000..00324b1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/builtin_hooks.py @@ -0,0 +1,102 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2024, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# config_provider.py +# +# Contains the built-in config providers. +# ----------------------------------------------------------------------------- + +import base64 +import json +import os +import urllib.parse +import warnings + +from . import errors +from .utils import register_password_type, register_protocol + + +def config_provider_file_hook(protocol, protocol_arg, connect_params): + """ + Hook for "config-file://". The protocol_arg is expected to be the name of a + file containing one or more configurations. An optional "key" parameter is + allowed which will choose a configuration from a set of configurations + stored in the file. 
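A hedged sketch of what using this hook can look like from application code; the file path, key name, and JSON fields are illustrative only, so check the python-oracledb configuration provider documentation for the exact schema:

    # /opt/oracle/configs.json (illustrative content):
    # {"production": {"connect_descriptor": "dbhost.example.com:1521/orclpdb1"}}
    import oracledb

    conn = oracledb.connect(
        user="hr",
        password="secret",
        dsn="config-file:///opt/oracle/configs.json?key=production",
    )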
+ """ + pos = protocol_arg.find("?") + if pos < 0: + file_name = protocol_arg + key = None + else: + file_name = protocol_arg[:pos] + args = urllib.parse.parse_qs(protocol_arg[pos + 1 :]) + key = args.get("key") + if key is not None: + key = key[0] + if not os.path.isabs(file_name): + if connect_params.config_dir is None: + errors._raise_err(errors.ERR_NO_CONFIG_DIR) + file_name = os.path.join(connect_params.config_dir, file_name) + config = json.load(open(file_name)) + if key is not None: + config = config[key] + connect_params.set_from_config(config) + + +register_protocol("config-file", config_provider_file_hook) + + +def ldap_hook(protocol, arg, params): + """ + Default hook for LDAP which simply points the user to the documentation + which explains how they can write their own hook for LDAP. + This hook is needed for python-oracledb Thin mode,or when + defaults.thick_mode_dsn_passthrough is False in Thick mode. + """ + doc_url = ( + "https://python-oracledb.readthedocs.io/en/latest" + "/user_guide/connection_handling.html#ldap-directory-naming" + ) + message = ( + f"To use an LDAP URL in python-oracledb, " + f"register an LDAP resolution function as shown in {doc_url}" + ) + raise Exception(message) + + +register_protocol("ldap", ldap_hook) +register_protocol("ldaps", ldap_hook) + + +def password_type_base64_hook(args): + """ + Hook for password type "base64". The key "value" in the supplied args is + expected to be a base64-encoded string. + """ + warnings.warn("base64 encoded passwords are insecure") + return base64.b64decode(args["value"].encode()).decode() + + +register_password_type("base64", password_type_base64_hook) diff --git a/.venv/lib/python3.9/site-packages/oracledb/connect_params.py b/.venv/lib/python3.9/site-packages/oracledb/connect_params.py new file mode 100644 index 0000000..68410a6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/connect_params.py @@ -0,0 +1,1266 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# connect_params.py +# +# Contains the ConnectParams class used for managing the parameters required to +# establish a connection to the database. +# +# *** NOTICE *** This file is generated from a template and should not be +# modified directly. See build_from_template.py in the utils subdirectory for +# more information. 
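As the default ldap_hook above suggests, applications can register their own resolver; a minimal sketch, where lookup_descriptor_in_ldap is a hypothetical helper standing in for the actual directory lookup:

    import oracledb

    def my_ldap_hook(protocol, arg, params):
        # "arg" is everything after "ldap://"; resolve it to a connect
        # descriptor, then apply it to the ConnectParams object.
        descriptor = lookup_descriptor_in_ldap(arg)   # placeholder helper
        params.parse_connect_string(descriptor)

    oracledb.register_protocol("ldap", my_ldap_hook)
    oracledb.register_protocol("ldaps", my_ldap_hook)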
+# ----------------------------------------------------------------------------- + +import functools +import ssl +from typing import Union, Callable, Any, Optional + +import oracledb + +from .base import BaseMetaClass +from . import base_impl, utils + + +class ConnectParams(metaclass=BaseMetaClass): + """ + Contains all parameters used for establishing a connection to the + database. + """ + + __slots__ = ["_impl"] + _impl_class = base_impl.ConnectParamsImpl + + @utils.params_initer + def __init__( + self, + *, + user: Optional[str] = None, + proxy_user: Optional[str] = None, + password: Optional[str] = None, + newpassword: Optional[str] = None, + wallet_password: Optional[str] = None, + access_token: Optional[Union[str, tuple, Callable]] = None, + host: Optional[str] = None, + port: Optional[int] = None, + protocol: Optional[str] = None, + https_proxy: Optional[str] = None, + https_proxy_port: Optional[int] = None, + service_name: Optional[str] = None, + instance_name: Optional[str] = None, + sid: Optional[str] = None, + server_type: Optional[str] = None, + cclass: Optional[str] = None, + purity: Optional[oracledb.Purity] = None, + expire_time: Optional[int] = None, + retry_count: Optional[int] = None, + retry_delay: Optional[int] = None, + tcp_connect_timeout: Optional[float] = None, + ssl_server_dn_match: Optional[bool] = None, + ssl_server_cert_dn: Optional[str] = None, + wallet_location: Optional[str] = None, + events: Optional[bool] = None, + externalauth: Optional[bool] = None, + mode: Optional[oracledb.AuthMode] = None, + disable_oob: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + edition: Optional[str] = None, + tag: Optional[str] = None, + matchanytag: Optional[bool] = None, + config_dir: Optional[str] = None, + appcontext: Optional[list] = None, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + debug_jdwp: Optional[str] = None, + connection_id_prefix: Optional[str] = None, + ssl_context: Optional[Any] = None, + sdu: Optional[int] = None, + pool_boundary: Optional[str] = None, + use_tcp_fast_open: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + program: Optional[str] = None, + machine: Optional[str] = None, + terminal: Optional[str] = None, + osuser: Optional[str] = None, + driver_name: Optional[str] = None, + use_sni: Optional[bool] = None, + thick_mode_dsn_passthrough: Optional[bool] = None, + extra_auth_params: Optional[dict] = None, + pool_name: Optional[str] = None, + handle: Optional[int] = None, + ): + """ + All parameters are optional. A brief description of each parameter + follows: + + - ``user``: the name of the database user to connect to + (default: None) + + - ``proxy_user``: the name of the proxy user to connect to. If this + value is not specified, it will be parsed out of user if user is in + the form "user[proxy_user]" + (default: None) + + - ``password``: the password for the database user + (default: None) + + - ``newpassword``: a new password for the database user. The new + password will take effect immediately upon a successful connection to + the database + (default: None) + + - ``wallet_password``: the password to use to decrypt the wallet, if it + is encrypted. This is not the database password. For Oracle + Autonomous Database this is the password created when downloading the + wallet. This value is only used in python-oracledb Thin mode. + (default: None) + + - ``access_token``: a string, or a 2-tuple, or a callable. 
If it is a + string, it specifies an Entra ID OAuth2 token used for Open + Authorization (OAuth 2.0) token based authentication. If it is a + 2-tuple, it specifies the token and private key strings used for + Oracle Cloud Infrastructure (OCI) Identity and Access Management + (IAM) token based authentication. If it is a callable, it returns + either a string or a 2-tuple used for OAuth 2.0 or OCI IAM token + based authentication and is useful when the pool needs to expand and + create new connections but the current authentication token has + expired + (default: None) + + - ``host``: the hostname or IP address of the machine hosting the + database or the database listener + (default: None) + + - ``port``: the port number on which the database listener is listening + (default: 1521) + + - ``protocol``: one of the strings "tcp" or "tcps" indicating whether + to use unencrypted network traffic or encrypted network traffic (TLS) + (default: "tcp") + + - ``https_proxy``: the hostname or IP address of a proxy host to use + for tunneling secure connections + (default: None) + + - ``https_proxy_port``: the port on which to communicate with the proxy + host + (default: 0) + + - ``service_name``: the service name of the database + (default: None) + + - ``instance_name``: the instance name of the database + (default: None) + + - ``sid``: the system identifier (SID) of the database. Note using a + service_name instead is recommended + (default: None) + + - ``server_type``: the type of server connection that should be + established. If specified, it should be one of strings "dedicated", + "shared" or "pooled" + (default: None) + + - ``cclass``: the connection class to use for Database Resident + Connection Pooling (DRCP) + (default: None) + + - ``purity``: the connection purity to use for Database Resident + Connection Pooling (DRCP) + (default: :attr:`oracledb.PURITY_DEFAULT`) + + - ``expire_time``: the number of minutes between the sending of + keepalive probes. If this parameter is set to a value greater than + zero it enables keepalive + (default: 0) + + - ``retry_count``: the number of times that initial connection + establishment should be retried before the connection attempt is + terminated + (default: 0) + + - ``retry_delay``: the number of seconds to wait before retrying to + establish a connection + (default: 1) + + - ``tcp_connect_timeout``: a float indicating the maximum number of + seconds to wait when establishing a connection to the database host + (default: 20.0) + + - ``ssl_server_dn_match``: a boolean indicating whether the server + certificate distinguished name (DN) should be matched in addition to + the regular certificate verification that is performed. Note that if + the ssl_server_cert_dn parameter is not provided, host name matching + is performed instead + (default: True) + + - ``ssl_server_cert_dn``: the distinguished name (DN) which should be + matched with the server. This value is ignored if the + ssl_server_dn_match parameter is not set to the value True. If + specified this value is used for any verification. Otherwise the + hostname will be used + (default: None) + + - ``wallet_location``: the directory where the wallet can be found. In + python-oracledb Thin mode this must be the directory containing the + PEM-encoded wallet file ewallet.pem. In python-oracledb Thick mode + this must be the directory containing the file cwallet.sso + (default: None) + + - ``events``: a boolean specifying whether events mode should be + enabled.
This value is only used in python-oracledb Thick mode and is + needed for continuous query notification and high availability event + notifications + (default: False) + + - ``externalauth``: a boolean indicating whether to use external + authentication + (default: False) + + - ``mode``: the authorization mode to use. One of the constants + :data:`oracledb.AUTH_MODE_DEFAULT`, + :data:`oracledb.AUTH_MODE_PRELIM`, :data:`oracledb.AUTH_MODE_SYSASM`, + :data:`oracledb.AUTH_MODE_SYSBKP`, :data:`oracledb.AUTH_MODE_SYSDBA`, + :data:`oracledb.AUTH_MODE_SYSDGD`, :data:`oracledb.AUTH_MODE_SYSKMT`, + :data:`oracledb.AUTH_MODE_SYSOPER`, or + :data:`oracledb.AUTH_MODE_SYSRAC` + (default: :attr:`oracledb.AUTH_MODE_DEFAULT`) + + - ``disable_oob``: a boolean indicating whether out-of-band breaks + should be disabled. This value is only used in python-oracledb Thin + mode. It has no effect on Windows which does not support this + functionality + (default: False) + + - ``stmtcachesize``: the size of the statement cache + (default: :attr:`oracledb.defaults.stmtcachesize + `) + + - ``edition``: edition to use for the connection. This parameter cannot + be used simultaneously with the cclass parameter + (default: None) + + - ``tag``: identifies the type of connection that should be returned + from a pool. This value is only used in python-oracledb Thick mode + (default: None) + + - ``matchanytag``: a boolean specifying whether any tag can be used + when acquiring a connection from the pool. This value is only used in + python-oracledb Thick mode + (default: False) + + - ``config_dir``: a directory in which the optional tnsnames.ora + configuration file is located. This value is only used in python- + oracledb Thin mode. For python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()` + (default: :attr:`oracledb.defaults.config_dir + `) + + - ``appcontext``: application context used by the connection. It should + be a list of 3-tuples (namespace, name, value) and each entry in the + tuple should be a string + (default: None) + + - ``shardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + (default: None) + + - ``supershardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + (default: None) + + - ``debug_jdwp``: a string with the format "host=;port=" + that specifies the host and port of the PL/SQL debugger. This value + is only used in python-oracledb Thin mode. For python-oracledb Thick + mode set the ORA_DEBUG_JDWP environment variable + (default: None) + + - ``connection_id_prefix``: an application specific prefix that is + added to the connection identifier used for tracing + (default: None) + + - ``ssl_context``: an SSLContext object used for connecting to the + database using TLS. This SSL context will be modified to include the + private key or any certificates found in a separately supplied + wallet. This parameter should only be specified if the default + SSLContext object cannot be used + (default: None) + + - ``sdu``: the requested size of the Session Data Unit (SDU), in bytes. + The value tunes internal buffers used for communication to the + database. 
Bigger values can increase throughput for large queries or + bulk data loads, but at the cost of higher memory use. The SDU size + that will actually be used is negotiated down to the lower of this + value and the database network SDU configuration value + (default: 8192) + + - ``pool_boundary``: one of the values "statement" or "transaction" + indicating when pooled DRCP connections can be returned to the pool. + This requires the use of DRCP with Oracle Database 23.4 or higher + (default: None) + + - ``use_tcp_fast_open``: a boolean indicating whether to use TCP fast + open. This is an Oracle Autonomous Database Serverless (ADB-S) + specific property for clients connecting from within OCI Cloud + network. Please refer to the ADB-S documentation for more information + (default: False) + + - ``ssl_version``: one of the values ssl.TLSVersion.TLSv1_2 or + ssl.TLSVersion.TLSv1_3 indicating which TLS version to use + (default: None) + + - ``program``: a string recorded by Oracle Database as the program from + which the connection originates + (default: :attr:`oracledb.defaults.program + `) + + - ``machine``: a string recorded by Oracle Database as the name of the + machine from which the connection originates + (default: :attr:`oracledb.defaults.machine + `) + + - ``terminal``: a string recorded by Oracle Database as the terminal + identifier from which the connection originates + (default: :attr:`oracledb.defaults.terminal + `) + + - ``osuser``: a string recorded by Oracle Database as the operating + system user who originated the connection + (default: :attr:`oracledb.defaults.osuser + `) + + - ``driver_name``: a string recorded by Oracle Database as the name of + the driver which originated the connection + (default: :attr:`oracledb.defaults.driver_name + `) + + - ``use_sni``: a boolean indicating whether to use the TLS SNI + extension to bypass the second TLS neogiation that would otherwise be + required + (default: False) + + - ``thick_mode_dsn_passthrough``: a boolean indicating whether to pass + the connect string to the Oracle Client libraries unchanged without + parsing by the driver. Setting this to False makes python-oracledb + Thick and Thin mode applications behave similarly regarding + connection string parameter handling and locating any optional + tnsnames.ora configuration file + (default: :attr:`oracledb.defaults.thick_mode_dsn_passthrough + `) + + - ``extra_auth_params``: a dictionary containing configuration + parameters necessary for Oracle Database authentication using + plugins, such as the Azure and OCI cloud-native authentication + plugins + (default: None) + + - ``pool_name``: the name of the DRCP pool when using multi-pool DRCP + with Oracle Database 23.4, or higher + (default: None) + + - ``handle``: an integer representing a pointer to a valid service + context handle. This value is only used in python-oracledb Thick + mode. 
It should be used with extreme caution + (default: 0) + """ + pass + + def __repr__(self): + return ( + self.__class__.__qualname__ + "(" + f"user={self.user!r}, " + f"proxy_user={self.proxy_user!r}, " + f"host={self.host!r}, " + f"port={self.port!r}, " + f"protocol={self.protocol!r}, " + f"https_proxy={self.https_proxy!r}, " + f"https_proxy_port={self.https_proxy_port!r}, " + f"service_name={self.service_name!r}, " + f"instance_name={self.instance_name!r}, " + f"sid={self.sid!r}, " + f"server_type={self.server_type!r}, " + f"cclass={self.cclass!r}, " + f"purity={self.purity!r}, " + f"expire_time={self.expire_time!r}, " + f"retry_count={self.retry_count!r}, " + f"retry_delay={self.retry_delay!r}, " + f"tcp_connect_timeout={self.tcp_connect_timeout!r}, " + f"ssl_server_dn_match={self.ssl_server_dn_match!r}, " + f"ssl_server_cert_dn={self.ssl_server_cert_dn!r}, " + f"wallet_location={self.wallet_location!r}, " + f"events={self.events!r}, " + f"externalauth={self.externalauth!r}, " + f"mode={self.mode!r}, " + f"disable_oob={self.disable_oob!r}, " + f"stmtcachesize={self.stmtcachesize!r}, " + f"edition={self.edition!r}, " + f"tag={self.tag!r}, " + f"matchanytag={self.matchanytag!r}, " + f"config_dir={self.config_dir!r}, " + f"appcontext={self.appcontext!r}, " + f"shardingkey={self.shardingkey!r}, " + f"supershardingkey={self.supershardingkey!r}, " + f"debug_jdwp={self.debug_jdwp!r}, " + f"connection_id_prefix={self.connection_id_prefix!r}, " + f"ssl_context={self.ssl_context!r}, " + f"sdu={self.sdu!r}, " + f"pool_boundary={self.pool_boundary!r}, " + f"use_tcp_fast_open={self.use_tcp_fast_open!r}, " + f"ssl_version={self.ssl_version!r}, " + f"program={self.program!r}, " + f"machine={self.machine!r}, " + f"terminal={self.terminal!r}, " + f"osuser={self.osuser!r}, " + f"driver_name={self.driver_name!r}, " + f"use_sni={self.use_sni!r}, " + f"thick_mode_dsn_passthrough={self.thick_mode_dsn_passthrough!r}, " + f"extra_auth_params={self.extra_auth_params!r}, " + f"pool_name={self.pool_name!r}" + ")" + ) + + def _flatten_value(f): + """ + Helper function used to flatten arrays of values if they only contain a + single item. + """ + + @functools.wraps(f) + def wrapped(self): + values = f(self) + return values if len(values) > 1 else values[0] + + return wrapped + + @property + def appcontext(self) -> list: + """ + Application context used by the connection. It should be a list of + 3-tuples (namespace, name, value) and each entry in the tuple should be + a string. + """ + return self._impl.appcontext + + @property + @_flatten_value + def cclass(self) -> Union[list, str]: + """ + The connection class to use for Database Resident Connection Pooling + (DRCP). + """ + return [d.cclass for d in self._impl.description_list.children] + + @property + def config_dir(self) -> str: + """ + A directory in which the optional tnsnames.ora configuration file is + located. This value is only used in python-oracledb Thin mode. For + python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()`. + """ + return self._impl.config_dir + + @property + @_flatten_value + def connection_id_prefix(self) -> Union[list, str]: + """ + An application specific prefix that is added to the connection + identifier used for tracing. 
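A minimal usage sketch of the constructor documented above, with placeholder host, service name, and credentials; calling get_connect_string() and passing the object to oracledb.connect() are the typical next steps.

import oracledb

params = oracledb.ConnectParams(
    host="dbhost.example.com",       # placeholder host
    port=1521,
    service_name="orclpdb1",         # placeholder service name
    tcp_connect_timeout=10.0,
    expire_time=2,
)
print(params.get_connect_string())   # the descriptor built from these values

connection = oracledb.connect(
    user="app_user",                 # placeholder credentials
    password="app_password",
    params=params,
)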
+ """ + return [ + d.connection_id_prefix + for d in self._impl.description_list.children + ] + + @property + def debug_jdwp(self) -> str: + """ + A string with the format "host=;port=" that specifies the + host and port of the PL/SQL debugger. This value is only used in + python-oracledb Thin mode. For python-oracledb Thick mode set the + ORA_DEBUG_JDWP environment variable. + """ + return self._impl.debug_jdwp + + @property + def disable_oob(self) -> bool: + """ + A boolean indicating whether out-of-band breaks should be disabled. + This value is only used in python-oracledb Thin mode. It has no effect + on Windows which does not support this functionality. + """ + return self._impl.disable_oob + + @property + def driver_name(self) -> str: + """ + A string recorded by Oracle Database as the name of the driver which + originated the connection. + """ + return self._impl.driver_name + + @property + def edition(self) -> str: + """ + Edition to use for the connection. This parameter cannot be used + simultaneously with the cclass parameter. + """ + return self._impl.edition + + @property + def events(self) -> bool: + """ + A boolean specifying whether events mode should be enabled. This value + is only used in python-oracledb Thick mode and is needed for continuous + query notification and high availability event notifications. + """ + return self._impl.events + + @property + @_flatten_value + def expire_time(self) -> Union[list, int]: + """ + The number of minutes between the sending of keepalive probes. If this + parameter is set to a value greater than zero it enables keepalive. + """ + return [d.expire_time for d in self._impl.description_list.children] + + @property + def externalauth(self) -> bool: + """ + A boolean indicating whether to use external authentication. + """ + return self._impl.externalauth + + @property + def extra_auth_params(self) -> dict: + """ + A dictionary containing configuration parameters necessary for Oracle + Database authentication using plugins, such as the Azure and OCI cloud- + native authentication plugins. + """ + return self._impl.extra_auth_params + + @property + @_flatten_value + def host(self) -> Union[list, str]: + """ + The hostname or IP address of the machine hosting the database or the + database listener. + """ + return [a.host for a in self._impl._get_addresses()] + + @property + @_flatten_value + def https_proxy(self) -> Union[list, str]: + """ + The hostname or IP address of a proxy host to use for tunneling secure + connections. + """ + return [a.https_proxy for a in self._impl._get_addresses()] + + @property + @_flatten_value + def https_proxy_port(self) -> Union[list, int]: + """ + The port on which to communicate with the proxy host. + """ + return [a.https_proxy_port for a in self._impl._get_addresses()] + + @property + @_flatten_value + def instance_name(self) -> Union[list, str]: + """ + The instance name of the database. + """ + return [d.instance_name for d in self._impl.description_list.children] + + @property + def machine(self) -> str: + """ + A string recorded by Oracle Database as the name of the machine from + which the connection originates. + """ + return self._impl.machine + + @property + def matchanytag(self) -> bool: + """ + A boolean specifying whether any tag can be used when acquiring a + connection from the pool. This value is only used in python-oracledb + Thick mode. + """ + return self._impl.matchanytag + + @property + def mode(self) -> oracledb.AuthMode: + """ + The authorization mode to use. 
One of the constants + :data:`oracledb.AUTH_MODE_DEFAULT`, :data:`oracledb.AUTH_MODE_PRELIM`, + :data:`oracledb.AUTH_MODE_SYSASM`, :data:`oracledb.AUTH_MODE_SYSBKP`, + :data:`oracledb.AUTH_MODE_SYSDBA`, :data:`oracledb.AUTH_MODE_SYSDGD`, + :data:`oracledb.AUTH_MODE_SYSKMT`, :data:`oracledb.AUTH_MODE_SYSOPER`, + or :data:`oracledb.AUTH_MODE_SYSRAC`. + """ + return oracledb.AuthMode(self._impl.mode) + + @property + def osuser(self) -> str: + """ + A string recorded by Oracle Database as the operating system user who + originated the connection. + """ + return self._impl.osuser + + @property + @_flatten_value + def pool_boundary(self) -> Union[list, str]: + """ + One of the values "statement" or "transaction" indicating when pooled + DRCP connections can be returned to the pool. This requires the use of + DRCP with Oracle Database 23.4 or higher. + """ + return [d.pool_boundary for d in self._impl.description_list.children] + + @property + @_flatten_value + def pool_name(self) -> Union[list, str]: + """ + The name of the DRCP pool when using multi-pool DRCP with Oracle + Database 23.4, or higher. + """ + return [d.pool_name for d in self._impl.description_list.children] + + @property + @_flatten_value + def port(self) -> Union[list, int]: + """ + The port number on which the database listener is listening. + """ + return [a.port for a in self._impl._get_addresses()] + + @property + def program(self) -> str: + """ + A string recorded by Oracle Database as the program from which the + connection originates. + """ + return self._impl.program + + @property + @_flatten_value + def protocol(self) -> Union[list, str]: + """ + One of the strings "tcp" or "tcps" indicating whether to use + unencrypted network traffic or encrypted network traffic (TLS). + """ + return [a.protocol for a in self._impl._get_addresses()] + + @property + def proxy_user(self) -> str: + """ + The name of the proxy user to connect to. If this value is not + specified, it will be parsed out of user if user is in the form + "user[proxy_user]". + """ + return self._impl.proxy_user + + @property + @_flatten_value + def purity(self) -> Union[list, oracledb.Purity]: + """ + The connection purity to use for Database Resident Connection Pooling + (DRCP). + """ + return [ + oracledb.Purity(d.purity) + for d in self._impl.description_list.children + ] + + @property + @_flatten_value + def retry_count(self) -> Union[list, int]: + """ + The number of times that initial connection establishment should be + retried before the connection attempt is terminated. + """ + return [d.retry_count for d in self._impl.description_list.children] + + @property + @_flatten_value + def retry_delay(self) -> Union[list, int]: + """ + The number of seconds to wait before retrying to establish a + connection. + """ + return [d.retry_delay for d in self._impl.description_list.children] + + @property + @_flatten_value + def sdu(self) -> Union[list, int]: + """ + The requested size of the Session Data Unit (SDU), in bytes. The value + tunes internal buffers used for communication to the database. Bigger + values can increase throughput for large queries or bulk data loads, + but at the cost of higher memory use. The SDU size that will actually + be used is negotiated down to the lower of this value and the database + network SDU configuration value. + """ + return [d.sdu for d in self._impl.description_list.children] + + @property + @_flatten_value + def server_type(self) -> Union[list, str]: + """ + The type of server connection that should be established. 
If specified, + it should be one of strings "dedicated", "shared" or "pooled". + """ + return [d.server_type for d in self._impl.description_list.children] + + @property + @_flatten_value + def service_name(self) -> Union[list, str]: + """ + The service name of the database. + """ + return [d.service_name for d in self._impl.description_list.children] + + @property + def shardingkey(self) -> list: + """ + A list of strings, numbers, bytes or dates that identify the database + shard to connect to. This value is only used in python-oracledb Thick + mode. + """ + return self._impl.shardingkey + + @property + @_flatten_value + def sid(self) -> Union[list, str]: + """ + The system identifier (SID) of the database. Note using a service_name + instead is recommended. + """ + return [d.sid for d in self._impl.description_list.children] + + @property + def ssl_context(self) -> Any: + """ + An SSLContext object used for connecting to the database using TLS. + This SSL context will be modified to include the private key or any + certificates found in a separately supplied wallet. This parameter + should only be specified if the default SSLContext object cannot be + used. + """ + return self._impl.ssl_context + + @property + @_flatten_value + def ssl_server_cert_dn(self) -> Union[list, str]: + """ + The distinguished name (DN) which should be matched with the server. + This value is ignored if the ssl_server_dn_match parameter is not set + to the value True. If specified this value is used for any verfication. + Otherwise the hostname will be used. + """ + return [ + d.ssl_server_cert_dn for d in self._impl.description_list.children + ] + + @property + @_flatten_value + def ssl_server_dn_match(self) -> Union[list, bool]: + """ + A boolean indicating whether the server certificate distinguished name + (DN) should be matched in addition to the regular certificate + verification that is performed. Note that if the ssl_server_cert_dn + parameter is not privided, host name matching is performed instead. + """ + return [ + d.ssl_server_dn_match for d in self._impl.description_list.children + ] + + @property + @_flatten_value + def ssl_version(self) -> Union[list, ssl.TLSVersion]: + """ + One of the values ssl.TLSVersion.TLSv1_2 or ssl.TLSVersion.TLSv1_3 + indicating which TLS version to use. + """ + return [d.ssl_version for d in self._impl.description_list.children] + + @property + def stmtcachesize(self) -> int: + """ + The size of the statement cache. + """ + return self._impl.stmtcachesize + + @property + def supershardingkey(self) -> list: + """ + A list of strings, numbers, bytes or dates that identify the database + shard to connect to. This value is only used in python-oracledb Thick + mode. + """ + return self._impl.supershardingkey + + @property + def tag(self) -> str: + """ + Identifies the type of connection that should be returned from a pool. + This value is only used in python-oracledb Thick mode. + """ + return self._impl.tag + + @property + @_flatten_value + def tcp_connect_timeout(self) -> Union[list, float]: + """ + A float indicating the maximum number of seconds to wait when + establishing a connection to the database host. + """ + return [ + d.tcp_connect_timeout for d in self._impl.description_list.children + ] + + @property + def terminal(self) -> str: + """ + A string recorded by Oracle Database as the terminal identifier from + which the connection originates. 
+ """ + return self._impl.terminal + + @property + def thick_mode_dsn_passthrough(self) -> bool: + """ + A boolean indicating whether to pass the connect string to the Oracle + Client libraries unchanged without parsing by the driver. Setting this + to False makes python-oracledb Thick and Thin mode applications behave + similarly regarding connection string parameter handling and locating + any optional tnsnames.ora configuration file. + """ + return self._impl.thick_mode_dsn_passthrough + + @property + def user(self) -> str: + """ + The name of the database user to connect to. + """ + return self._impl.user + + @property + @_flatten_value + def use_sni(self) -> Union[list, bool]: + """ + A boolean indicating whether to use the TLS SNI extension to bypass the + second TLS neogiation that would otherwise be required. + """ + return [d.use_sni for d in self._impl.description_list.children] + + @property + @_flatten_value + def use_tcp_fast_open(self) -> Union[list, bool]: + """ + A boolean indicating whether to use TCP fast open. This is an Oracle + Autonomous Database Serverless (ADB-S) specific property for clients + connecting from within OCI Cloud network. Please refer to the ADB-S + documentation for more information. + """ + return [ + d.use_tcp_fast_open for d in self._impl.description_list.children + ] + + @property + @_flatten_value + def wallet_location(self) -> Union[list, str]: + """ + The directory where the wallet can be found. In python-oracledb Thin + mode this must be the directory containing the PEM-encoded wallet file + ewallet.pem. In python-oracledb Thick mode this must be the directory + containing the file cwallet.sso. + """ + return [ + d.wallet_location for d in self._impl.description_list.children + ] + + def copy(self) -> "ConnectParams": + """ + Creates a copy of the ConnectParams instance and returns it. + """ + params = ConnectParams.__new__(ConnectParams) + params._impl = self._impl.copy() + return params + + def get_connect_string(self) -> str: + """ + Returns the connection string associated with the instance. + """ + return self._impl.get_connect_string() + + def get_network_service_names(self) -> list: + """ + Returns a list of the network service names found in the + :ref:`tnsnames.ora ` file which is inside the directory + that can be identified by the attribute + :attr:`~ConnectParams.config_dir`. If a tnsnames.ora file does not + exist, then an exception is raised. + """ + return self._impl.get_network_service_names() + + def parse_connect_string(self, connect_string: str) -> None: + """ + Parses the connect string into its components and stores the + parameters. + + The ``connect string`` parameter can be an Easy Connect string, + name-value pairs, or a simple alias which is looked up in + ``tnsnames.ora``. Parameters that are found in the connect string + override any currently stored values. + """ + self._impl.parse_connect_string(connect_string) + + def parse_dsn_with_credentials(self, dsn: str) -> tuple: + """ + Parses a DSN in the form /@ or in the + form / and returns a 3-tuple containing the parsed + user, password and connect string. Empty strings are returned as the + value *None*. 
+ """ + return self._impl.parse_dsn_with_credentials(dsn) + + @utils.params_setter + def set( + self, + *, + user: Optional[str] = None, + proxy_user: Optional[str] = None, + password: Optional[str] = None, + newpassword: Optional[str] = None, + wallet_password: Optional[str] = None, + access_token: Optional[Union[str, tuple, Callable]] = None, + host: Optional[str] = None, + port: Optional[int] = None, + protocol: Optional[str] = None, + https_proxy: Optional[str] = None, + https_proxy_port: Optional[int] = None, + service_name: Optional[str] = None, + instance_name: Optional[str] = None, + sid: Optional[str] = None, + server_type: Optional[str] = None, + cclass: Optional[str] = None, + purity: Optional[oracledb.Purity] = None, + expire_time: Optional[int] = None, + retry_count: Optional[int] = None, + retry_delay: Optional[int] = None, + tcp_connect_timeout: Optional[float] = None, + ssl_server_dn_match: Optional[bool] = None, + ssl_server_cert_dn: Optional[str] = None, + wallet_location: Optional[str] = None, + events: Optional[bool] = None, + externalauth: Optional[bool] = None, + mode: Optional[oracledb.AuthMode] = None, + disable_oob: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + edition: Optional[str] = None, + tag: Optional[str] = None, + matchanytag: Optional[bool] = None, + config_dir: Optional[str] = None, + appcontext: Optional[list] = None, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + debug_jdwp: Optional[str] = None, + connection_id_prefix: Optional[str] = None, + ssl_context: Optional[Any] = None, + sdu: Optional[int] = None, + pool_boundary: Optional[str] = None, + use_tcp_fast_open: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + program: Optional[str] = None, + machine: Optional[str] = None, + terminal: Optional[str] = None, + osuser: Optional[str] = None, + driver_name: Optional[str] = None, + use_sni: Optional[bool] = None, + thick_mode_dsn_passthrough: Optional[bool] = None, + extra_auth_params: Optional[dict] = None, + pool_name: Optional[str] = None, + handle: Optional[int] = None, + ): + """ + Sets the values for one or more of the parameters of a ConnectParams + object. All parameters are optional. A brief description of each + parameter follows: + + - ``user``: the name of the database user to connect to + + - ``proxy_user``: the name of the proxy user to connect to. If this + value is not specified, it will be parsed out of user if user is in + the form "user[proxy_user]" + + - ``password``: the password for the database user + + - ``newpassword``: a new password for the database user. The new + password will take effect immediately upon a successful connection to + the database + + - ``wallet_password``: the password to use to decrypt the wallet, if it + is encrypted. This is not the database password. For Oracle + Autonomous Database this is the password created when downloading the + wallet. This value is only used in python-oracledb Thin mode. + + - ``access_token``: a string, or a 2-tuple, or a callable. If it is a + string, it specifies an Entra ID OAuth2 token used for Open + Authorization (OAuth 2.0) token based authentication. If it is a + 2-tuple, it specifies the token and private key strings used for + Oracle Cloud Infrastructure (OCI) Identity and Access Management + (IAM) token based authentication. 
If it is a callable, it returns + either a string or a 2-tuple used for OAuth 2.0 or OCI IAM token + based authentication and is useful when the pool needs to expand and + create new connections but the current authentication token has + expired + + - ``host``: the hostname or IP address of the machine hosting the + database or the database listener + + - ``port``: the port number on which the database listener is listening + + - ``protocol``: one of the strings "tcp" or "tcps" indicating whether + to use unencrypted network traffic or encrypted network traffic (TLS) + + - ``https_proxy``: the hostname or IP address of a proxy host to use + for tunneling secure connections + + - ``https_proxy_port``: the port on which to communicate with the proxy + host + + - ``service_name``: the service name of the database + + - ``instance_name``: the instance name of the database + + - ``sid``: the system identifier (SID) of the database. Note using a + service_name instead is recommended + + - ``server_type``: the type of server connection that should be + established. If specified, it should be one of strings "dedicated", + "shared" or "pooled" + + - ``cclass``: the connection class to use for Database Resident + Connection Pooling (DRCP) + + - ``purity``: the connection purity to use for Database Resident + Connection Pooling (DRCP) + + - ``expire_time``: the number of minutes between the sending of + keepalive probes. If this parameter is set to a value greater than + zero it enables keepalive + + - ``retry_count``: the number of times that initial connection + establishment should be retried before the connection attempt is + terminated + + - ``retry_delay``: the number of seconds to wait before retrying to + establish a connection + + - ``tcp_connect_timeout``: a float indicating the maximum number of + seconds to wait when establishing a connection to the database host + + - ``ssl_server_dn_match``: a boolean indicating whether the server + certificate distinguished name (DN) should be matched in addition to + the regular certificate verification that is performed. Note that if + the ssl_server_cert_dn parameter is not privided, host name matching + is performed instead + + - ``ssl_server_cert_dn``: the distinguished name (DN) which should be + matched with the server. This value is ignored if the + ssl_server_dn_match parameter is not set to the value True. If + specified this value is used for any verfication. Otherwise the + hostname will be used + + - ``wallet_location``: the directory where the wallet can be found. In + python-oracledb Thin mode this must be the directory containing the + PEM-encoded wallet file ewallet.pem. In python-oracledb Thick mode + this must be the directory containing the file cwallet.sso + + - ``events``: a boolean specifying whether events mode should be + enabled. This value is only used in python-oracledb Thick mode and is + needed for continuous query notification and high availability event + notifications + + - ``externalauth``: a boolean indicating whether to use external + authentication + + - ``mode``: the authorization mode to use. 
One of the constants + :data:`oracledb.AUTH_MODE_DEFAULT`, + :data:`oracledb.AUTH_MODE_PRELIM`, :data:`oracledb.AUTH_MODE_SYSASM`, + :data:`oracledb.AUTH_MODE_SYSBKP`, :data:`oracledb.AUTH_MODE_SYSDBA`, + :data:`oracledb.AUTH_MODE_SYSDGD`, :data:`oracledb.AUTH_MODE_SYSKMT`, + :data:`oracledb.AUTH_MODE_SYSOPER`, or + :data:`oracledb.AUTH_MODE_SYSRAC` + + - ``disable_oob``: a boolean indicating whether out-of-band breaks + should be disabled. This value is only used in python-oracledb Thin + mode. It has no effect on Windows which does not support this + functionality + + - ``stmtcachesize``: the size of the statement cache + + - ``edition``: edition to use for the connection. This parameter cannot + be used simultaneously with the cclass parameter + + - ``tag``: identifies the type of connection that should be returned + from a pool. This value is only used in python-oracledb Thick mode + + - ``matchanytag``: a boolean specifying whether any tag can be used + when acquiring a connection from the pool. This value is only used in + python-oracledb Thick mode + + - ``config_dir``: a directory in which the optional tnsnames.ora + configuration file is located. This value is only used in python- + oracledb Thin mode. For python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()` + + - ``appcontext``: application context used by the connection. It should + be a list of 3-tuples (namespace, name, value) and each entry in the + tuple should be a string + + - ``shardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + + - ``supershardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + + - ``debug_jdwp``: a string with the format "host=;port=" + that specifies the host and port of the PL/SQL debugger. This value + is only used in python-oracledb Thin mode. For python-oracledb Thick + mode set the ORA_DEBUG_JDWP environment variable + + - ``connection_id_prefix``: an application specific prefix that is + added to the connection identifier used for tracing + + - ``ssl_context``: an SSLContext object used for connecting to the + database using TLS. This SSL context will be modified to include the + private key or any certificates found in a separately supplied + wallet. This parameter should only be specified if the default + SSLContext object cannot be used + + - ``sdu``: the requested size of the Session Data Unit (SDU), in bytes. + The value tunes internal buffers used for communication to the + database. Bigger values can increase throughput for large queries or + bulk data loads, but at the cost of higher memory use. The SDU size + that will actually be used is negotiated down to the lower of this + value and the database network SDU configuration value + + - ``pool_boundary``: one of the values "statement" or "transaction" + indicating when pooled DRCP connections can be returned to the pool. + This requires the use of DRCP with Oracle Database 23.4 or higher + + - ``use_tcp_fast_open``: a boolean indicating whether to use TCP fast + open. This is an Oracle Autonomous Database Serverless (ADB-S) + specific property for clients connecting from within OCI Cloud + network. 
Please refer to the ADB-S documentation for more information + + - ``ssl_version``: one of the values ssl.TLSVersion.TLSv1_2 or + ssl.TLSVersion.TLSv1_3 indicating which TLS version to use + + - ``program``: a string recorded by Oracle Database as the program from + which the connection originates + + - ``machine``: a string recorded by Oracle Database as the name of the + machine from which the connection originates + + - ``terminal``: a string recorded by Oracle Database as the terminal + identifier from which the connection originates + + - ``osuser``: a string recorded by Oracle Database as the operating + system user who originated the connection + + - ``driver_name``: a string recorded by Oracle Database as the name of + the driver which originated the connection + + - ``use_sni``: a boolean indicating whether to use the TLS SNI + extension to bypass the second TLS neogiation that would otherwise be + required + + - ``thick_mode_dsn_passthrough``: a boolean indicating whether to pass + the connect string to the Oracle Client libraries unchanged without + parsing by the driver. Setting this to False makes python-oracledb + Thick and Thin mode applications behave similarly regarding + connection string parameter handling and locating any optional + tnsnames.ora configuration file + + - ``extra_auth_params``: a dictionary containing configuration + parameters necessary for Oracle Database authentication using + plugins, such as the Azure and OCI cloud-native authentication + plugins + + - ``pool_name``: the name of the DRCP pool when using multi-pool DRCP + with Oracle Database 23.4, or higher + + - ``handle``: an integer representing a pointer to a valid service + context handle. This value is only used in python-oracledb Thick + mode. It should be used with extreme caution + """ + pass + + def set_from_config(self, config: dict) -> None: + """ + Sets the property values based on the specified configuration. This + method is intended for use with Centralized Configuration Providers. + + The ``config`` parameter is a dictionary which consists of the + following optional keys: "connect_descriptor", "user", "password", and + "pyo". + + If the key "connect_descriptor" is specified, it is expected to be a + string, which will be parsed and the properties found within it are + stored in the ConnectParams instance. + + If the keys "user" or "password" are specified, and the parameters do + not already have a user or password set, these values will be stored; + otherwise, they will be ignored. The key "user" is expected to be a + string. The "key" password may be a string or it may be a dictionary + which will be examined by a :ref:`registered password type handler + ` to determine the actual password. + + If the key "pyo" is specified, it is expected to be a dictionary + containing keys corresponding to property names. Any property names + accepted by the ConnectParams class will be stored in the ConnectParams + instance; all other values will be ignored. + """ + self._impl.set_from_config(config) diff --git a/.venv/lib/python3.9/site-packages/oracledb/connection.py b/.venv/lib/python3.9/site-packages/oracledb/connection.py new file mode 100644 index 0000000..310e8ba --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/connection.py @@ -0,0 +1,3320 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 2025, Oracle and/or its affiliates. 
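A minimal sketch of set_from_config() as described above, using an inline dictionary in place of a centralized configuration provider; all values are placeholders.

import oracledb

params = oracledb.ConnectParams()
params.set_from_config(
    {
        "connect_descriptor": "dbhost.example.com:1521/orclpdb1",
        "user": "app_user",
        "pyo": {"stmtcachesize": 40, "expire_time": 2},
    }
)
print(params.user, params.stmtcachesize)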
+# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# connection.py +# +# Contains the Connection class and the factory method connect() used for +# establishing connections to the database. +# +# *** NOTICE *** This file is generated from a template and should not be +# modified directly. See build_from_template.py in the utils subdirectory for +# more information. +# ----------------------------------------------------------------------------- + +import collections +import functools +import ssl +from typing import Any, Callable, Iterator, Type, Optional, Union + +import oracledb + +from . import base_impl, driver_mode, errors, thick_impl, thin_impl +from . import pool as pool_module +from .aq import AsyncQueue, Queue, MessageProperties +from .arrow_impl import ArrowSchemaImpl +from .base import BaseMetaClass +from .base_impl import DB_TYPE_BLOB, DB_TYPE_CLOB, DB_TYPE_NCLOB, DbType +from .connect_params import ConnectParams +from .cursor import AsyncCursor, Cursor +from .dataframe import DataFrame +from .dbobject import DbObjectType, DbObject +from .lob import AsyncLOB, LOB +from .pipeline import Pipeline, PipelineOpResult +from .soda import SodaDatabase +from .subscr import Subscription +from .utils import normalize_sessionless_transaction_id + +# named tuple used for representing global transactions +Xid = collections.namedtuple( + "Xid", ["format_id", "global_transaction_id", "branch_qualifier"] +) + + +class BaseConnection(metaclass=BaseMetaClass): + _impl = None + + def __init__(self): + self._version = None + + def __repr__(self): + cls_name = self.__class__._public_name + if self._impl is None: + return f"<{cls_name} disconnected>" + elif self.username is None: + return f"<{cls_name} to externally identified user>" + return f"<{cls_name} to {self.username}@{self.dsn}>" + + def _verify_connected(self) -> None: + """ + Verifies that the connection is connected to the database. If it is + not, an exception is raised. + """ + if self._impl is None: + errors._raise_err(errors.ERR_NOT_CONNECTED) + + def _verify_xid(self, xid: Xid) -> None: + """ + Verifies that the supplied xid is of the correct type. + """ + if not isinstance(xid, Xid): + message = "expecting transaction id created with xid()" + raise TypeError(message) + + @property + def action(self) -> str: + """ + This write-only attribute sets the ACTION column in the V$SESSION view. + It is a string attribute but the value *None* is accepted and treated + as an empty string. 
+ """ + raise AttributeError("action is not readable") + + @action.setter + def action(self, value: str) -> None: + self._verify_connected() + self._impl.set_action(value) + + @property + def autocommit(self) -> bool: + """ + This read-write attribute determines whether autocommit mode is on or + off. When autocommit mode is on, all statements are committed as soon + as they have completed executing. + """ + self._verify_connected() + return self._impl.autocommit + + @autocommit.setter + def autocommit(self, value: bool) -> None: + self._verify_connected() + self._impl.autocommit = value + + def begin_sessionless_transaction( + self, + transaction_id: Optional[Union[str, bytes]] = None, + timeout: int = 60, + defer_round_trip: bool = False, + ) -> bytes: + """ + Begins a new sessionless transaction. This method returns the + transaction identifier specified by the user or generated by + python-oracledb. + + The ``transaction_id`` parameter should be of type string or bytes. If + specified, it represents a unique identifier for the transaction. If a + string is passed, then it will be UTF-8 encoded to bytes. If this value + is not specified, then python-oracledb generates a a random + `universally-unique identifier (UUID) + `__ value. An example is + "36b8f84d-df4e-4d49-b662-bcde71a8764f". Any user-chosen value cannot + exceed 64 bytes in length. + + The ``timeout`` parameter is the number of seconds that this + transaction can stay suspended when + :meth:`suspend_sessionless_transaction()` is later called, + or if the transaction is automatically suspended when the + ``suspend_on_success`` parameter is set to to *True* in + :meth:`Cursor.execute()` or :meth:`Cursor.executemany()`. The default + value is *60* seconds. If a transaction is not resumed within this + specified duration, the transaction will be rolled back. + + The ``defer_round_trip`` parameter is a boolean that determines whether + the request to start a transaction is to be sent immediately or with + the next database operation. If set to *False*, the request is sent + immediately. If set to *True*, the request is included with the next + database operation on the connection. The default value is *False*. + """ + self._verify_connected() + normalized_txnid = normalize_sessionless_transaction_id(transaction_id) + + if not isinstance(timeout, int) or timeout <= 0: + raise TypeError("timeout must be a positive integer") + + self._impl.begin_sessionless_transaction( + normalized_txnid, timeout, defer_round_trip + ) + return normalized_txnid + + @property + def call_timeout(self) -> int: + """ + This read-write attribute specifies the amount of time (in + milliseconds) that a single round-trip to the database may take before + a timeout will occur. A value of *0* means that no timeout will take + place. + + In python-oracledb Thick mode, this attribute is only available in + Oracle Client 18c or later. + + If a timeout occurs, the error ``DPI-1067`` will be returned if the + connection is still usable. Alternatively the error ``DPI-1080`` will + be returned if the connection has become invalid and can no longer be + used. + + For consistency and compliance with the PEP 8 naming style, the + attribute ``callTimeout`` was renamed to ``call_timeout``. The old name + will continue to work for a period of time. The error ``DPI-1080`` was + also introduced in this release. 
+ """ + self._verify_connected() + return self._impl.get_call_timeout() + + @call_timeout.setter + def call_timeout(self, value: int) -> None: + self._verify_connected() + self._impl.set_call_timeout(value) + + def cancel(self) -> None: + """ + Breaks a long-running statement. + """ + self._verify_connected() + self._impl.cancel() + + @property + def client_identifier(self) -> str: + """ + This write-only attribute sets the CLIENT_IDENTIFIER column in the + V$SESSION view. + """ + raise AttributeError("client_identifier is not readable") + + @client_identifier.setter + def client_identifier(self, value: str) -> None: + self._verify_connected() + self._impl.set_client_identifier(value) + + @property + def clientinfo(self) -> str: + """ + This write-only attribute sets the CLIENT_INFO column in the V$SESSION + view. + """ + raise AttributeError("clientinfo is not readable") + + @clientinfo.setter + def clientinfo(self, value: str) -> None: + """ + Specifies the client_info column in the v$session table. + """ + self._verify_connected() + self._impl.set_client_info(value) + + @property + def current_schema(self) -> str: + """ + This read-write attribute sets the current schema attribute for the + session. Setting this value is the same as executing the SQL statement + ``ALTER SESSION SET CURRENT_SCHEMA``. The attribute is set (and + verified) on the next call that does a round trip to the server. The + value is placed before unqualified database objects in SQL statements + you then execute. + """ + self._verify_connected() + return self._impl.get_current_schema() + + @current_schema.setter + def current_schema(self, value: str) -> None: + self._verify_connected() + self._impl.set_current_schema(value) + + @property + def db_domain(self) -> str: + """ + This read-only attribute specifies the Oracle Database domain name + associated with the connection. It is the same value returned by the + SQL ``SELECT value FROM V$PARAMETER WHERE NAME = 'db_domain'``. + """ + self._verify_connected() + return self._impl.get_db_domain() + + @property + def db_name(self) -> str: + """ + This read-only attribute specifies the Oracle Database name associated + with the connection. It is the same value returned by the SQL ``SELECT + NAME FROM V$DATABASE``. + """ + self._verify_connected() + return self._impl.get_db_name() + + @property + def dbop(self) -> str: + """ + This write-only attribute sets the database operation that is to be + monitored. This can be viewed in the DBOP_NAME column of the + V$SQL_MONITOR view. + """ + raise AttributeError("dbop is not readable") + + @dbop.setter + def dbop(self, value: str) -> None: + self._verify_connected() + self._impl.set_dbop(value) + + def decode_oson(self, data: bytes) -> Any: + """ + Decodes `OSON-encoded + `__ bytes and returns the + object encoded in those bytes. This is useful for fetching columns + which have the check constraint ``IS JSON FORMAT OSON`` enabled. + """ + self._verify_connected() + return self._impl.decode_oson(data) + + @property + def dsn(self) -> str: + """ + This read-only attribute returns the TNS entry of the database to which + a connection has been established. + """ + self._verify_connected() + return self._impl.dsn + + @property + def econtext_id(self) -> str: + """ + This write-only attribute specifies the execution context id. This + value can be found as the ECID column in the V$SESSION view and + ECONTEXT_ID in the auditing tables. The maximum length is 64 bytes. 
+ """ + raise AttributeError("econtext_id is not readable") + + @econtext_id.setter + def econtext_id(self, value: str) -> None: + self._verify_connected() + self._impl.set_econtext_id(value) + + @property + def edition(self) -> str: + """ + This read-only attribute gets the session edition and is only available + with Oracle Database 11.2, or later. + """ + self._verify_connected() + return self._impl.get_edition() + + def encode_oson(self, value: Any) -> bytes: + """ + Encodes a Python value into `OSON-encoded + `__ bytes and returns + them. This is useful for inserting into columns which have the check + constraint ``IS JSON FORMAT OSON`` enabled. + """ + self._verify_connected() + return self._impl.encode_oson(value) + + @property + def external_name(self) -> str: + """ + This read-write attribute specifies the external name that is used by + the connection when logging distributed transactions. + """ + self._verify_connected() + return self._impl.get_external_name() + + @external_name.setter + def external_name(self, value: str) -> None: + self._verify_connected() + self._impl.set_external_name(value) + + @property + def inputtypehandler(self) -> Callable: + """ + This read-write attribute specifies a method called for each value that + is bound to a statement executed on any cursor associated with this + connection. The method signature is handler(cursor, value, arraysize) + and the return value is expected to be a variable object or *None* in + which case a default variable object will be created. If this attribute + is *None*, the default behavior will take place for all values bound to + statements. + """ + self._verify_connected() + return self._impl.inputtypehandler + + @inputtypehandler.setter + def inputtypehandler(self, value: Callable) -> None: + self._verify_connected() + self._impl.inputtypehandler = value + + @property + def instance_name(self) -> str: + """ + This read-only attribute specifies the Oracle Database instance name + associated with the connection. It is the same value as the SQL + expression ``sys_context('userenv', 'instance_name')``. + """ + self._verify_connected() + return self._impl.get_instance_name() + + @property + def internal_name(self) -> str: + """ + This read-write attribute specifies the internal name that is used by + the connection when logging distributed transactions. + """ + self._verify_connected() + return self._impl.get_internal_name() + + @internal_name.setter + def internal_name(self, value: str) -> None: + self._verify_connected() + self._impl.set_internal_name(value) + + def is_healthy(self) -> bool: + """ + This function returns a boolean indicating the health status of a + connection. + + Connections may become unusable in several cases, such as, if the + network socket is broken, if an Oracle error indicates the connection + is unusable, or, after receiving a planned down notification from the + database. + + This function is best used before starting a new database request on an + existing :ref:`standalone connections `. For + pooled connections, the :meth:`ConnectionPool.acquire()` method + internally performs this check before returning a connection to the + application, see :ref:`poolhealth`. + + If this function returns *False*, the connection should be not be used + by the application and a new connection should be established instead. + + This function performs a local check. To fully check a connection's + health, use :meth:`ping()` which performs a round-trip to + the database. 
+ """ + return self._impl is not None and self._impl.get_is_healthy() + + @property + def ltxid(self) -> bytes: + """ + This read-only attribute returns the logical transaction id for the + connection. It is used within Oracle Transaction Guard as a means of + ensuring that transactions are not duplicated. See :ref:`tg` for more + information. + + This is only available with Oracle Database 12.1 or later. In + python-oracledb Thick mode, it also requires Oracle Client libraries + 12.1 or later. + """ + self._verify_connected() + return self._impl.get_ltxid() + + @property + def max_identifier_length(self) -> int: + """ + This read-only attribute specifies the maximum database identifier + length in bytes supported by the database to which the connection has + been established. See `Database Object Naming Rules + `__. The value may be + *None*, *30*, or *128*. The value *None* indicates the size cannot be + reliably determined by python-oracledb, which occurs when using Thick + mode with Oracle Client libraries 12.1 (or older) to connect to Oracle + Database 12.2, or later. + """ + self._verify_connected() + return self._impl.get_max_identifier_length() + + @property + def max_open_cursors(self) -> int: + """ + This read-only attribute specifies the maximum number of cursors that + the database can have open concurrently. It is the same value returned + by the SQL ``SELECT VALUE FROM V$PARAMETER WHERE NAME = + 'open_cursors'``. When using python-oracledb Thick mode, Oracle Client + libraries 12.1 (or later) are required. + """ + self._verify_connected() + return self._impl.get_max_open_cursors() + + @property + def module(self) -> str: + """ + This write-only attribute sets the MODULE column in the V$SESSION view. + The maximum length for this string is 48 and if you exceed this length + you will get ``ORA-24960``. + """ + raise AttributeError("module is not readable") + + @module.setter + def module(self, value: str) -> None: + self._verify_connected() + self._impl.set_module(value) + + def msgproperties( + self, + payload: Optional[Union[bytes, str, DbObject]] = None, + correlation: Optional[str] = None, + delay: Optional[int] = None, + exceptionq: Optional[str] = None, + expiration: Optional[int] = None, + priority: Optional[int] = None, + recipients: Optional[list] = None, + ) -> MessageProperties: + """ + Returns an object specifying the properties of messages used in + advanced queuing. + + Each of the parameters are optional. If specified, they act as a + shortcut for setting each of the equivalently named properties. + """ + impl = self._impl.create_msg_props_impl() + props = MessageProperties._from_impl(impl) + if payload is not None: + props.payload = payload + if correlation is not None: + props.correlation = correlation + if delay is not None: + props.delay = delay + if exceptionq is not None: + props.exceptionq = exceptionq + if expiration is not None: + props.expiration = expiration + if priority is not None: + props.priority = priority + if recipients is not None: + props.recipients = recipients + return props + + def queue( + self, + name: str, + payload_type: Optional[Union[DbObjectType, str]] = None, + *, + payloadType: Optional[DbObjectType] = None, + ) -> Union[Queue, AsyncQueue]: + """ + Creates a queue which is used to enqueue and dequeue messages in + Advanced Queuing. + + The ``name`` parameter is expected to be a string identifying the queue + in which messages are to be enqueued or dequeued. 
+ + The ``payload_type`` parameter, if specified, is expected to be an + :ref:`object type ` that identifies the type of payload + the queue expects. If the string "JSON" is specified, JSON data is + enqueued and dequeued. If not specified, RAW data is enqueued and + dequeued. + + For consistency and compliance with the PEP 8 naming style, the + parameter ``payloadType`` was renamed to ``payload_type``. The old name + will continue to work as a keyword parameter for a period of time. + """ + self._verify_connected() + payload_type_impl = None + is_json = False + if payloadType is not None: + if payload_type is not None: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="payloadType", + new_name="payload_type", + ) + payload_type = payloadType + if payload_type is not None: + if payload_type == "JSON": + is_json = True + elif not isinstance(payload_type, DbObjectType): + raise TypeError("expecting DbObjectType") + else: + payload_type_impl = payload_type._impl + impl = self._impl.create_queue_impl() + impl.initialize(self._impl, name, payload_type_impl, is_json) + return self._create_queue(impl) + + @property + def outputtypehandler(self) -> Callable: + """ + This read-write attribute specifies a method called for each column + that is going to be fetched from any cursor associated with this + connection. The method signature is ``handler(cursor, metadata)`` and + the return value is expected to be a :ref:`variable object` or + *None* in which case a default variable object will be created. If this + attribute is *None*, the default behavior will take place for all + columns fetched from cursors. + """ + self._verify_connected() + return self._impl.outputtypehandler + + @outputtypehandler.setter + def outputtypehandler(self, value: Callable) -> None: + self._verify_connected() + self._impl.outputtypehandler = value + + @property + def proxy_user(self) -> Union[str, None]: + """ + This read-only attribute returns the name of the user which was used as + a proxy when creating the connection to the database. + """ + self._verify_connected() + return self._impl.proxy_user + + def resume_sessionless_transaction( + self, + transaction_id: Union[str, bytes], + timeout: int = 60, + defer_round_trip: bool = False, + ) -> bytes: + """ + Resumes an existing sessionless transaction using the specified + transaction identifier. This method returns the transaction identifier + used to resume the sessionless transaction. + + The ``transaction_id`` parameter should be a string or bytes value that + uniquely identifies an existing sessionless transaction that is to be + resumed. + + The ``timeout`` parameter is the number of seconds that the current + connection waits to resume a transaction if another connection is using + it. When ``defer_round_trip`` is set to *False*, the wait happens in + the ``resume_sessionless_transaction()`` call itself, and the function + blocks until the transaction becomes available or the timeout expires. + When ``defer_round_trip`` is set to *True*, the resume is deferred and + the wait occurs at the time of the next database operation instead. At + the start of the wait period, if the transaction is not in use by any + other connection, the resume happens immediately. If the transaction + remains in use by the other connection after the timeout period, the + error `ORA-25351 + `__ is raised. If + another connection completes the transaction, the error `ORA-24756 + `__ is raised. + These error messages are only thrown for non-RAC instances. 
For + information on using Oracle RAC, see :ref:`Sessionless Transactions + with Oracle RAC `. The default value is *60* + seconds. + + The ``defer_round_trip`` parameter is a boolean that determines whether + the request to resume a transaction is to be sent immediately or with + the next database operation. If set to *False*, the request is sent + immediately. If set to *True*, the request is included with the next + database operation on the connection. The default value is *False*. + """ + self._verify_connected() + if transaction_id is None: + raise ValueError("transaction_id is required for resuming") + + normalized_txnid = normalize_sessionless_transaction_id(transaction_id) + + if not (isinstance(timeout, int) and timeout >= 0): + raise TypeError("timeout must be a non-negative integer") + + self._impl.resume_sessionless_transaction( + normalized_txnid, timeout, defer_round_trip + ) + return normalized_txnid + + @property + def sdu(self) -> int: + """ + This read-only attribute specifies the size of the Session Data Unit + (SDU) that is being used by the connection. The value will be the + lesser of the requested python-oracledb size and the maximum size + allowed by the database network configuration. It is available only in + python-oracledb Thin mode. To set the SDU in Thick mode, use a + connection string SDU parameter or set a value for DEFAULT_SDU_SIZE in + a sqlnet.ora configuration file. + """ + self._verify_connected() + return self._impl.get_sdu() + + @property + def serial_num(self) -> int: + """ + This read-only attribute specifies the session serial number associated + with the connection. It is the same value returned by the SQL ``SELECT + SERIAL# FROM V$SESSION WHERE SID=SYS_CONTEXT('USERENV', 'SID')``. It + is available only in python-oracledb Thin mode. + + For applications using :ref:`drcp`, the ``serial_num`` attribute may + not contain the current session state until a round-trip is made to the + database after acquiring a session. It is recommended to not use this + attribute if your application uses DRCP but may not perform a + round-trip. + """ + self._verify_connected() + return self._impl.get_serial_num() + + @property + def service_name(self) -> str: + """ + This read-only attribute specifies the Oracle Database service name + associated with the connection. This is the same value returned by the + SQL ``SELECT SYS_CONTEXT('USERENV', 'SERVICE_NAME') FROM DUAL``. + """ + self._verify_connected() + return self._impl.get_service_name() + + @property + def session_id(self) -> int: + """ + This read-only attribute specifies the session identifier associated + with the connection. It is the same value returned by the SQL ``SELECT + SYS_CONTEXT('USERENV', 'SID') FROM DUAL``. It is available only in + python-oracledb Thin mode. + + For applications using :ref:`drcp`, the ``session_id`` attribute may + not contain the current session state until a round-trip is made to the + database after acquiring a session. It is recommended to not use this + attribute if your application uses DRCP but may not perform a + round-trip. + """ + self._verify_connected() + return self._impl.get_session_id() + + @property + def stmtcachesize(self) -> int: + """ + This read-write attribute specifies the size of the statement cache. + This value can make a significant difference in performance if you have + a small number of statements that you execute repeatedly. + + The default value is *20*. 
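+
+        For example, an illustrative sketch (``conn`` is a placeholder for an
+        existing connection)::
+
+            conn.stmtcachesize = 40  # cache up to 40 parsed statements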
+ """ + self._verify_connected() + return self._impl.get_stmt_cache_size() + + @stmtcachesize.setter + def stmtcachesize(self, value: int) -> None: + self._verify_connected() + self._impl.set_stmt_cache_size(value) + + @property + def tag(self) -> str: + """ + This read-write attribute initially contains the actual tag of the + session that was acquired from a pool by + :meth:`ConnectionPool.acquire()`. If the connection was not acquired + from a pool or no tagging parameters were specified (``tag`` and + ``matchanytag``) when the connection was acquired from the pool, this + value will be None. If the value is changed, it must be a string + containing name=value pairs like "k1=v1;k2=v2". + + If this value is not *None* when the connection is released back to the + pool it will be used to retag the session. This value can be overridden + in the call to :meth:`ConnectionPool.release()`. + """ + self._verify_connected() + return self._impl.tag + + @tag.setter + def tag(self, value: str) -> None: + self._verify_connected() + self._impl.tag = value + + @property + def thin(self) -> bool: + """ + This read-only attribute returns a boolean indicating if + python-oracledb is in Thin mode (*True*) or Thick mode (*False*). + """ + self._verify_connected() + return self._impl.thin + + @property + def transaction_in_progress(self) -> bool: + """ + This read-only attribute specifies whether a transaction is currently + in progress on the database associated with the connection. + """ + self._verify_connected() + return self._impl.get_transaction_in_progress() + + @property + def username(self) -> str: + """ + This read-only attribute returns the name of the user which established + the connection to the database. + """ + self._verify_connected() + return self._impl.username + + @property + def version(self) -> str: + """ + This read-only attribute returns the version of the database to which a + connection has been established. + """ + if self._version is None: + self._verify_connected() + self._version = ".".join(str(c) for c in self._impl.server_version) + return self._version + + @property + def warning(self) -> Union[errors._Error, None]: + """ + This read-only attribute provides an + :ref:`oracledb._Error` object giving information about any + database warnings (such as the password being in the grace period, or + the pool being created with a smaller than requested size due to + database resource restrictions) that were generated during connection + establishment or by :meth:`oracledb.create_pool()`. The attribute will + be present if there was a warning, but creation otherwise completed + successfully. The connection will be usable despite the warning. + + For :ref:`standalone connections `, + ``Connection.warning`` will be present for the lifetime of the + connection. + + For :ref:`pooled connections `, ``Connection.warning`` + will be cleared when a connection is released to the pool such as with + :meth:`ConnectionPool.release()`. + + In python-oracledb Thick mode, warnings may be generated during pool + creation itself. These warnings will be placed on new connections + created by the pool, provided no warnings were generated by the + individual connection creations, in which case those connection + warnings will be returned. + + If no warning was generated the value *None* is returned. 
+ """ + self._verify_connected() + return self._impl.warning + + def xid( + self, + format_id: int, + global_transaction_id: Union[bytes, str], + branch_qualifier: Union[bytes, str], + ) -> Xid: + """ + Returns a global transaction identifier (xid) that can be used with the + Two-Phase Commit (TPC) functions. + + The ``xid`` contains a format identifier, a global transaction + identifier, and a branch identifier. There are no checks performed at + the Python level. The values are checked by ODPI-C when they are passed + to the relevant functions. .. When this functionality is also + supported in the thin driver the checks will be performed at the Python + level as well. + + The ``format_id`` parameter should be a positive 32-bit integer. This + value identifies the format of the ``global_transaction_id`` and + ``branch_qualifier`` parameters and the value is determined by the + Transaction Manager (TM), if one is in use. + + The ``global_transaction_id`` and ``branch_qualifier`` parameters + should be of type bytes or string. If a value of type string is passed, + then this value will be UTF-8 encoded to bytes. The values cannot + exceed 64 bytes in length. + """ + return Xid(format_id, global_transaction_id, branch_qualifier) + + +class Connection(BaseConnection): + + def __init__( + self, + dsn: Optional[str] = None, + *, + pool: Optional["pool_module.ConnectionPool"] = None, + params: Optional[ConnectParams] = None, + **kwargs, + ) -> None: + """ + Constructor for creating a connection to the database. + """ + + super().__init__() + self._pool = pool + + # determine if thin mode is being used + with driver_mode.get_manager() as mode_mgr: + thin = mode_mgr.thin + + # determine which connection parameters to use + if params is None: + params_impl = base_impl.ConnectParamsImpl() + elif not isinstance(params, ConnectParams): + errors._raise_err(errors.ERR_INVALID_CONNECT_PARAMS) + else: + params_impl = params._impl.copy() + dsn = params_impl.process_args(dsn, kwargs, thin) + + # see if connection is being acquired from a pool + if pool is None: + pool_impl = None + else: + pool._verify_open() + pool_impl = pool._impl + + # create thin or thick implementation object + if thin: + if ( + params_impl.shardingkey is not None + or params_impl.supershardingkey is not None + ): + errors._raise_err( + errors.ERR_FEATURE_NOT_SUPPORTED, + feature="sharding", + driver_type="thick", + ) + if pool is not None: + impl = pool_impl.acquire(params_impl) + else: + impl = thin_impl.ThinConnImpl(dsn, params_impl) + impl.connect(params_impl) + else: + impl = thick_impl.ThickConnImpl(dsn, params_impl) + impl.connect(params_impl, pool_impl) + self._impl = impl + + # invoke callback, if applicable + if ( + impl.invoke_session_callback + and pool is not None + and pool.session_callback is not None + and callable(pool.session_callback) + ): + pool.session_callback(self, params_impl.tag) + impl.invoke_session_callback = False + + def __del__(self): + if self._impl is not None: + self._close(in_del=True) + + def __enter__(self): + """ + The entry point for the connection as a context manager. It returns + itself. + """ + self._verify_connected() + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + """ + The exit point for the connection as a context manager. This will close + the connection and roll back any uncommitted transaction. + """ + if self._impl is not None: + self._close() + + def _close(self, in_del=False): + """ + Closes the connection and makes it unusable for further operations. 
An + Error exception will be raised if any operation is attempted with this + connection after this method completes successfully. + """ + if self._pool is not None: + pool_impl = self._pool._impl + if pool_impl is not None: + pool_impl.return_connection(self._impl, in_del) + else: + self._impl.close(in_del) + self._impl = None + + def _create_queue(self, impl): + """ + Returns a queue object that the user can use to dequeue and enqueue + messages. + """ + return Queue._from_impl(self, impl) + + def _get_oci_attr( + self, handle_type: int, attr_num: int, attr_type: int + ) -> Any: + """ + Returns the value of the specified OCI attribute from the internal + handle. This is only supported in python-oracledb thick mode and should + only be used as directed by Oracle. + """ + self._verify_connected() + return self._impl._get_oci_attr(handle_type, attr_num, attr_type) + + def _set_oci_attr( + self, handle_type: int, attr_num: int, attr_type: int, value: Any + ) -> None: + """ + Sets the value of the specified OCI attribute on the internal handle. + This is only supported in python-oracledb thick mode and should only + be used as directed by Oracle. + """ + self._verify_connected() + self._impl._set_oci_attr(handle_type, attr_num, attr_type, value) + + def begin( + self, + format_id: int = -1, + transaction_id: str = "", + branch_id: str = "", + ) -> None: + """ + Explicitly begins a new transaction. Without parameters, this + explicitly begins a local transaction; otherwise, this explicitly + begins a distributed (global) transaction with the given parameters. + See the Oracle documentation for more details. + + Note that in order to make use of global (distributed) transactions, + the :attr:`~Connection.internal_name` and + :attr:`~Connection.external_name` attributes must be set. + """ + if format_id != -1: + self.tpc_begin(self.xid(format_id, transaction_id, branch_id)) + + @property + def callTimeout(self) -> int: + """ + Deprecated. Use property call_timeout instead. + """ + return self.call_timeout + + @callTimeout.setter + def callTimeout(self, value: int) -> None: + self._verify_connected() + self._impl.set_call_timeout(value) + + def changepassword(self, old_password: str, new_password: str) -> None: + """ + Changes the password for the user to which the connection is connected. + """ + self._verify_connected() + self._impl.change_password(old_password, new_password) + + def close(self) -> None: + """ + Closes the connection now and makes it unusable for further operations. + An Error exception will be raised if any operation is attempted with + this connection after this method is completed successfully. + + All open cursors and LOBs created by the connection will be closed and + will also no longer be usable. + + Internally, references to the connection are held by cursor objects, + LOB objects, subscription objects, etc. Once all of these references + are released, the connection itself will be closed automatically. + Either control references to these related objects carefully or + explicitly close connections in order to ensure sufficient resources + are available. + """ + self._verify_connected() + self._close() + + def commit(self) -> None: + """ + Commits any pending transactions to the database. + """ + self._verify_connected() + self._impl.commit() + + def createlob( + self, lob_type: DbType, data: Optional[Union[str, bytes]] = None + ) -> LOB: + """ + Creates and returns a new temporary LOB object of the specified type. 
+ The ``lob_type`` parameter should be one of + :data:`oracledb.DB_TYPE_CLOB`, :data:`oracledb.DB_TYPE_BLOB`, or + :data:`oracledb.DB_TYPE_NCLOB`. + + If data is supplied, it will be written to the temporary LOB before it + is returned. + """ + self._verify_connected() + if lob_type not in (DB_TYPE_CLOB, DB_TYPE_NCLOB, DB_TYPE_BLOB): + message = ( + "parameter should be one of oracledb.DB_TYPE_CLOB, " + "oracledb.DB_TYPE_BLOB or oracledb.DB_TYPE_NCLOB" + ) + raise TypeError(message) + impl = self._impl.create_temp_lob_impl(lob_type) + lob = LOB._from_impl(impl) + if data: + lob.write(data) + return lob + + def cursor(self, scrollable: bool = False) -> Cursor: + """ + Returns a new :ref:`cursor object ` using the connection. + """ + self._verify_connected() + return Cursor(self, scrollable) + + def direct_path_load( + self, + schema_name: str, + table_name: str, + column_names: list[str], + data: Any, + *, + batch_size: int = 2**32 - 1, + ) -> None: + """ + Load data into Oracle Database using the Direct Path Load interface. + It is available only in python-oracledb Thin mode. + + The ``data`` parameter can be a list of sequences, a DataFrame, or a + third-party DataFrame instance that supports the Apache Arrow PyCapsule + Interface. + + The ``batch_size`` parameter is used to split large data sets into + smaller pieces for sending to the database. It is the number of records + in each batch. This parameter can be used to tune performance. + """ + self._verify_connected() + self._impl.direct_path_load( + schema_name, table_name, column_names, data, batch_size + ) + + def fetch_df_all( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + arraysize: Optional[int] = None, + *, + fetch_decimals: Optional[bool] = None, + requested_schema: Optional[Any] = None, + ) -> DataFrame: + """ + Fetches all rows of the SQL query ``statement``, returning them in a + :ref:`DataFrame ` object. An empty DataFrame is + returned if there are no rows available. + + The ``parameters`` parameter can be a list of tuples, where each tuple + item maps to one :ref:`bind variable placeholder ` in + ``statement``. It can also be a list of dictionaries, where the keys + match the bind variable placeholder names in ``statement``. + + The ``arraysize`` parameter can be specified to tune performance of + fetching data across the network. It defaults to + :attr:`oracledb.defaults.arraysize `. Internally, + the ``fetch_df_all()``'s :attr:`Cursor.prefetchrows` size is always set + to the value of the explicit or default ``arraysize`` parameter value. + + The ``fetch_decimals`` parameter specifies whether to return decimal + values when fetching columns of type ``NUMBER`` that are capable of + being represented in Apache Arrow Decimal128 format. The default value + is :data:`oracledb.defaults.fetch_decimals `. + + The ``requested_schema`` parameter specifies an object that implements + the Apache Arrow PyCapsule schema interface. The DataFrame returned by + ``fetch_df_all()`` will have the data types and names of the schema. + + Any LOB fetched must be less than 1 GB. 
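+
+        For example, an illustrative sketch (``conn`` is a placeholder for an
+        existing connection and the table name is hypothetical)::
+
+            odf = conn.fetch_df_all(
+                "select id, name from mytable where id > :1", [100],
+                arraysize=1000,
+            )
+            print(odf.num_rows(), "rows in", odf.num_columns(), "columns")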
+ """ + cursor = self.cursor() + cursor._impl.fetching_arrow = True + if requested_schema is not None: + cursor._impl.schema_impl = ArrowSchemaImpl.from_arrow_schema( + requested_schema + ) + if arraysize is not None: + cursor.arraysize = arraysize + cursor.prefetchrows = cursor.arraysize + cursor.execute( + statement, + parameters, + fetch_decimals=fetch_decimals, + ) + return cursor._impl.fetch_df_all(cursor) + + def fetch_df_batches( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + size: Optional[int] = None, + *, + fetch_decimals: Optional[bool] = None, + requested_schema: Optional[Any] = None, + ) -> Iterator[DataFrame]: + """ + This returns an iterator yielding the next ``size`` rows of the SQL + query ``statement`` in each iteration as a :ref:`DataFrame + ` object. An empty DataFrame is returned if there + are no rows available. + + The ``parameters`` parameter can be a list of tuples, where each tuple + item maps to one :ref:`bind variable placeholder ` in + ``statement``. It can also be a list of dictionaries, where the keys + match the bind variable placeholder names in ``statement``. + + The ``size`` parameter controls the number of records fetched in each + batch. It defaults to + :attr:`oracledb.defaults.arraysize `. Internally, + the ``fetch_df_batches()``'s :attr:`Cursor.arraysize` and + :attr:`Cursor.prefetchrows` sizes are always set to the value of the + explicit or default ``size`` parameter value. + + The ``fetch_decimals`` parameter specifies whether to return decimal + values when fetching columns of type ``NUMBER`` that are capable of + being represented in Apache Arrow Decimal128 format. The default value + is :data:`oracledb.defaults.fetch_decimals `. + + The ``requested_schema`` parameter specifies an object that implements + the Apache Arrow PyCapsule schema interface. The DataFrame returned by + ``fetch_df_all()`` will have the data types and names of the schema. + + Any LOB fetched must be less than 1 GB. + """ + cursor = self.cursor() + cursor._impl.fetching_arrow = True + if requested_schema is not None: + cursor._impl.schema_impl = ArrowSchemaImpl.from_arrow_schema( + requested_schema + ) + if size is not None: + cursor.arraysize = size + cursor.prefetchrows = cursor.arraysize + cursor.execute( + statement, + parameters, + fetch_decimals=fetch_decimals, + ) + if size is None: + yield cursor._impl.fetch_df_all(cursor) + else: + yield from cursor._impl.fetch_df_batches(cursor, batch_size=size) + + def getSodaDatabase(self) -> SodaDatabase: + """ + Returns a SodaDatabase object for Simple Oracle Document Access (SODA). + All SODA operations are performed either on the returned SodaDatabase + object or from objects created by the returned SodaDatabase object. See + `here `__ for + additional information on SODA. + """ + self._verify_connected() + db_impl = self._impl.create_soda_database_impl(self) + return SodaDatabase._from_impl(self, db_impl) + + def gettype(self, name: str) -> DbObjectType: + """ + Returns a type object given its name. This can then be used to create + objects which can be bound to cursors created by this connection. + """ + self._verify_connected() + obj_type_impl = self._impl.get_type(self, name) + return DbObjectType._from_impl(obj_type_impl) + + @property + def handle(self) -> int: + """ + This read-only attribute returns the Oracle Call Interface (OCI) + service context handle for the connection. 
It is primarily provided to + facilitate testing the creation of a connection using the OCI service + context handle. + + This property is only relevant to python-oracledb's thick mode. + """ + self._verify_connected() + return self._impl.get_handle() + + @property + def maxBytesPerCharacter(self) -> int: + """ + Deprecated. Use the constant value 4 instead. + """ + return 4 + + def ping(self) -> None: + """ + Pings the database to verify if the connection is valid. An exception + is thrown if it is not, in which case the connection should not be used + by the application and a new connection should be established instead. + + This function performs a :ref:`round-trip ` to the + database, so it should not be used unnecessarily. + + Note connection pools will perform the same health check automatically, + based on configuration settings. See :ref:`poolhealth`. + + Also, see :meth:`is_healthy()` for a lightweight alternative. + """ + self._verify_connected() + self._impl.ping() + + def prepare(self) -> bool: + """ + Prepares the distributed (global) transaction for commit. Returns a + boolean indicating if a transaction was actually prepared in order to + avoid the error ``ORA-24756 (transaction does not exist)``. + """ + return self.tpc_prepare() + + def rollback(self) -> None: + """ + Rolls back any pending transactions. + """ + self._verify_connected() + self._impl.rollback() + + def shutdown(self, mode: int = 0) -> None: + """ + Shuts down the database. In order to do this the connection must be + connected as :data:`~oracledb.SYSDBA` or :data:`~oracledb.SYSOPER`. Two + calls must be made unless the mode specified is + :data:`~oracledb.DBSHUTDOWN_ABORT`. + """ + self._verify_connected() + self._impl.shutdown(mode) + + def startup( + self, + force: bool = False, + restrict: bool = False, + pfile: Optional[str] = None, + ) -> None: + """ + Starts up the database. This is equivalent to the SQL*Plus command + ``startup nomount``. The connection must be connected as + :data:`~oracledb.SYSDBA` or :data:`~oracledb.SYSOPER` with the + :data:`~oracledb.PRELIM_AUTH` option specified for this to work. + + The ``pfile`` parameter, if specified, is expected to be a string + identifying the location of the parameter file (PFILE) which will be + used instead of the stored parameter file (SPFILE). + """ + self._verify_connected() + self._impl.startup(force, restrict, pfile) + + def subscribe( + self, + namespace: int = oracledb.SUBSCR_NAMESPACE_DBCHANGE, + protocol: int = oracledb.SUBSCR_PROTO_CALLBACK, + callback: Optional[Callable] = None, + timeout: int = 0, + operations: int = oracledb.OPCODE_ALLOPS, + port: int = 0, + qos: int = oracledb.SUBSCR_QOS_DEFAULT, + ip_address: Optional[str] = None, + grouping_class: int = oracledb.SUBSCR_GROUPING_CLASS_NONE, + grouping_value: int = 0, + grouping_type: int = oracledb.SUBSCR_GROUPING_TYPE_SUMMARY, + name: Optional[str] = None, + client_initiated: bool = False, + *, + ipAddress: Optional[str] = None, + groupingClass: int = oracledb.SUBSCR_GROUPING_CLASS_NONE, + groupingValue: int = 0, + groupingType: int = oracledb.SUBSCR_GROUPING_TYPE_SUMMARY, + clientInitiated: bool = False, + ) -> Subscription: + """ + Returns a new subscription object that receives notifications for + events that take place in the database that match the given parameters. + + The ``namespace`` parameter specifies the namespace the subscription + uses. It can be one of :data:`oracledb.SUBSCR_NAMESPACE_DBCHANGE` or + :data:`oracledb.SUBSCR_NAMESPACE_AQ`. 
+
+        The ``protocol`` parameter specifies the protocol to use when
+        notifications are sent. Currently the only valid value is
+        :data:`oracledb.SUBSCR_PROTO_CALLBACK`.
+
+        The ``callback`` is expected to be a callable that accepts a single
+        parameter. A :ref:`message object ` is passed to this
+        callback whenever a notification is received.
+
+        The ``timeout`` value specifies that the subscription expires after the
+        given time in seconds. The default value of *0* indicates that the
+        subscription never expires.
+
+        The ``operations`` parameter enables filtering of the messages that are
+        sent (insert, update, delete). The default value will send
+        notifications for all operations. This parameter is only used when the
+        namespace is set to :data:`oracledb.SUBSCR_NAMESPACE_DBCHANGE`.
+
+        The ``port`` parameter specifies the listening port for callback
+        notifications from the database server. If not specified, an unused
+        port will be selected by the Oracle Client libraries.
+
+        The ``qos`` parameter specifies quality of service options. It should
+        be one or more of the following flags, OR'ed together:
+        :data:`oracledb.SUBSCR_QOS_RELIABLE`,
+        :data:`oracledb.SUBSCR_QOS_DEREG_NFY`,
+        :data:`oracledb.SUBSCR_QOS_ROWIDS`, :data:`oracledb.SUBSCR_QOS_QUERY`,
+        :data:`oracledb.SUBSCR_QOS_BEST_EFFORT`.
+
+        The ``ip_address`` parameter specifies the IP address (*IPv4* or
+        *IPv6*) in standard string notation to bind for callback notifications
+        from the database server. If not specified, the client IP address will
+        be determined by the Oracle Client libraries.
+
+        The ``grouping_class`` parameter specifies what type of grouping of
+        notifications should take place. Currently, if set, this value can only
+        be set to the value :data:`oracledb.SUBSCR_GROUPING_CLASS_TIME`, which
+        will group notifications by the number of seconds specified in the
+        ``grouping_value`` parameter. The ``grouping_type`` parameter should be
+        one of the values :data:`oracledb.SUBSCR_GROUPING_TYPE_SUMMARY` (the
+        default) or :data:`oracledb.SUBSCR_GROUPING_TYPE_LAST`.
+
+        The ``name`` parameter is used to identify the subscription and is
+        specific to the selected namespace. If the namespace parameter is
+        :data:`oracledb.SUBSCR_NAMESPACE_DBCHANGE` then the name is optional
+        and can be any value. If the namespace parameter is
+        :data:`oracledb.SUBSCR_NAMESPACE_AQ`, however, the name must be in the
+        format '<queue_name>' for single consumer queues and
+        '<queue_name>:<consumer_name>' for multiple consumer queues, and
+        identifies the queue that will be monitored for messages. The queue
+        name may include the schema, if needed.
+
+        The ``client_initiated`` parameter is used to determine if client
+        initiated connections or server initiated connections (the default)
+        will be established. Client initiated connections are only available in
+        Oracle Client 19.4 and Oracle Database 19.4 and higher.
+
+        For consistency and compliance with the PEP 8 naming style, the
+        parameter ``ipAddress`` was renamed to ``ip_address``, the parameter
+        ``groupingClass`` was renamed to ``grouping_class``, the parameter
+        ``groupingValue`` was renamed to ``grouping_value``, the parameter
+        ``groupingType`` was renamed to ``grouping_type`` and the parameter
+        ``clientInitiated`` was renamed to ``client_initiated``. The old names
+        will continue to work as keyword parameters for a period of time.
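+
+        For example, an illustrative sketch of registering for change
+        notification (``conn`` is a placeholder for a connection created with
+        ``events=True`` in Thick mode; the table name is hypothetical)::
+
+            def on_change(message):
+                print("notification received:", message.type)
+
+            subscr = conn.subscribe(
+                callback=on_change, qos=oracledb.SUBSCR_QOS_ROWIDS
+            )
+            subscr.registerquery("select * from mytable")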
+ """ + self._verify_connected() + if ipAddress is not None: + if ip_address is not None: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="ipAddress", + new_name="ip_address", + ) + ip_address = ipAddress + if groupingClass != oracledb.SUBSCR_GROUPING_CLASS_NONE: + if grouping_class != oracledb.SUBSCR_GROUPING_CLASS_NONE: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="groupingClass", + new_name="grouping_class", + ) + grouping_class = groupingClass + if groupingValue != 0: + if grouping_value != 0: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="groupingValue", + new_name="grouping_value", + ) + grouping_value = groupingValue + if groupingType != oracledb.SUBSCR_GROUPING_TYPE_SUMMARY: + if grouping_type != oracledb.SUBSCR_GROUPING_TYPE_SUMMARY: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="groupingType", + new_name="grouping_type", + ) + grouping_type = groupingType + if clientInitiated: + if client_initiated: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="clientInitiated", + new_name="client_initiated", + ) + client_initiated = clientInitiated + impl = self._impl.create_subscr_impl( + self, + callback, + namespace, + name, + protocol, + ip_address, + port, + timeout, + operations, + qos, + grouping_class, + grouping_value, + grouping_type, + client_initiated, + ) + subscr = Subscription._from_impl(impl) + impl.subscribe(subscr, self._impl) + return subscr + + def suspend_sessionless_transaction(self) -> None: + """ + Suspends the currently active sessionless transaction immediately. + + This detaches the transaction from the connection, allowing it to be + resumed later with the transaction identifier that was specified during + creation of the sessionless transaction. The ``timeout`` previously + passed to :meth:`begin_sessionless_transaction()` determines how long + the transaction can stay suspended before it is automatically rolled + back. + """ + self._verify_connected() + self._impl.suspend_sessionless_transaction() + + def tpc_begin( + self, xid: Xid, flags: int = oracledb.TPC_BEGIN_NEW, timeout: int = 0 + ) -> None: + """ + Begins a Two-Phase Commit (TPC) on a global transaction using the + specified transaction identifier (xid). + + The ``xid`` parameter should be an object returned by the + :meth:`xid()` method. + + The ``flags`` parameter is one of the constants + :data:`oracledb.TPC_BEGIN_JOIN`, :data:`oracledb.TPC_BEGIN_NEW`, + :data:`oracledb.TPC_BEGIN_PROMOTE`, or + :data:`oracledb.TPC_BEGIN_RESUME`. The default is + :data:`oracledb.TPC_BEGIN_NEW`. + + The ``timeout`` parameter is the number of seconds to wait for a + transaction to become available for resumption when + :data:`~oracledb.TPC_BEGIN_RESUME` is specified in the ``flags`` + parameter. When :data:`~oracledb.TPC_BEGIN_NEW` is specified in the + ``flags`` parameter, the ``timeout`` parameter indicates the number of + seconds the transaction can be inactive before it is automatically + terminated by the system. A transaction is inactive between the time it + is detached with :meth:`tpc_end()` and the time it is resumed with + :meth:`tpc_begin()`.The default is *0* seconds. 
+ """ + self._verify_connected() + self._verify_xid(xid) + if flags not in ( + oracledb.TPC_BEGIN_NEW, + oracledb.TPC_BEGIN_JOIN, + oracledb.TPC_BEGIN_RESUME, + oracledb.TPC_BEGIN_PROMOTE, + ): + errors._raise_err(errors.ERR_INVALID_TPC_BEGIN_FLAGS) + self._impl.tpc_begin(xid, flags, timeout) + + def tpc_commit( + self, xid: Optional[Xid] = None, one_phase: bool = False + ) -> None: + """ + Commits a global transaction. When called with no arguments, this + method commits a transaction previously prepared with + :meth:`tpc_begin()` and optionally prepared with :meth:`tpc_prepare()`. + If :meth:`tpc_prepare()` is not called, a single phase commit is + performed. A transaction manager may choose to do this if only a single + resource is participating in the global transaction. + + If an ``xid`` parameter is passed, then an object should be returned by + the :meth:`xid()` function. This form should be called outside of a + transaction and is intended for use in recovery. + + The ``one_phase`` parameter is a boolean identifying whether to perform + a one-phase or two-phase commit. If ``one_phase`` parameter is *True*, + a single-phase commit is performed. The default value is *False*. This + parameter is only examined if a value is provided for the ``xid`` + parameter. Otherwise, the driver already knows whether + :meth:`tpc_prepare()` was called for the transaction and whether a + one-phase or two-phase commit is required. + """ + self._verify_connected() + if xid is not None: + self._verify_xid(xid) + self._impl.tpc_commit(xid, one_phase) + + def tpc_end( + self, xid: Optional[Xid] = None, flags: int = oracledb.TPC_END_NORMAL + ) -> None: + """ + Ends or suspends work on a global transaction. This function is only + intended for use by transaction managers. + + If an ``xid`` parameter is passed, then an object should be returned by + the :meth:`xid()` function. If no xid parameter is passed, then the + transaction identifier used by the previous :meth:`tpc_begin()` is + used. + + The ``flags`` parameter is one of the constants + :data:`oracledb.TPC_END_NORMAL` or :data:`oracledb.TPC_END_SUSPEND`. + The default is :data:`oracledb.TPC_END_NORMAL`. + + If the flag is :data:`oracledb.TPC_END_SUSPEND` then the transaction + may be resumed later by calling :meth:`tpc_begin()` with the flag + :data:`oracledb.TPC_BEGIN_RESUME`. + """ + self._verify_connected() + if xid is not None: + self._verify_xid(xid) + if flags not in (oracledb.TPC_END_NORMAL, oracledb.TPC_END_SUSPEND): + errors._raise_err(errors.ERR_INVALID_TPC_END_FLAGS) + self._impl.tpc_end(xid, flags) + + def tpc_forget(self, xid: Xid) -> None: + """ + Causes the database to forget a heuristically completed TPC + transaction. This function is only intended to be called by + transaction managers. + + The ``xid`` parameter is mandatory and should be an object should be + returned by the :meth:`xid()` function. + """ + self._verify_connected() + self._verify_xid(xid) + self._impl.tpc_forget(xid) + + def tpc_prepare(self, xid: Optional[Xid] = None) -> bool: + """ + Prepares a two-phase transaction for commit. After this function is + called, no further activity should take place on this connection until + either :meth:`tpc_commit()` or :meth:`tpc_rollback()` have been called. + + Returns a boolean indicating whether a commit is needed or not. If you + attempt to commit when not needed, then it results in the error + ``ORA-24756: transaction does not exist``. 
+ + If an ``xid`` parameter is passed, then an object should be returned by + the :meth:`xid()` function. If an ``xid`` parameter is not passed, then + the transaction identifier used by the previous :meth:`tpc_begin()` is + used. + """ + self._verify_connected() + if xid is not None: + self._verify_xid(xid) + return self._impl.tpc_prepare(xid) + + def tpc_recover(self) -> list: + """ + Returns a list of pending transaction identifiers that require + recovery. Objects of type ``Xid`` (as returned by the + :meth:`xid()` function) are returned and these can be passed to + :meth:`tpc_commit()` or :meth:`tpc_rollback()` as needed. + + This function queries the DBA_PENDING_TRANSACTIONS view and requires + "SELECT" privilege on that view. + """ + with self.cursor() as cursor: + cursor.execute( + """ + select + formatid, + globalid, + branchid + from dba_pending_transactions""" + ) + cursor.rowfactory = Xid + return cursor.fetchall() + + def tpc_rollback(self, xid: Optional[Xid] = None) -> None: + """ + If an ``xid`` parameter is not passed, then it rolls back the + transaction that was previously started with + :meth:`tpc_begin()`. + + If an ``xid`` parameter is passed, then an object should be returned by + :meth:`xid()` and the specified transaction is rolled back. This form + should be called outside of a transaction and is intended for use in + recovery. + """ + self._verify_connected() + if xid is not None: + self._verify_xid(xid) + self._impl.tpc_rollback(xid) + + def unsubscribe(self, subscr: Subscription) -> None: + """ + Unsubscribe from events in the database that were originally subscribed + to using :meth:`subscribe()`. The connection used to unsubscribe should + be the same one used to create the subscription, or should access the + same database and be connected as the same user name. + """ + self._verify_connected() + if not isinstance(subscr, Subscription): + raise TypeError("expecting subscription") + subscr._impl.unsubscribe(self._impl) + + +def _connection_factory( + f: Callable[..., Connection], +) -> Callable[..., Connection]: + """ + Decorator which checks the validity of the supplied keyword parameters by + calling the original function (which does nothing), then creates and + returns an instance of the requested Connection class. The base Connection + class constructor does not check the validity of the supplied keyword + parameters. 
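+
+    For example, an illustrative sketch of the call pattern this wrapper
+    supports (the subclass name, credentials, and connect string are
+    placeholders)::
+
+        class MyConnection(oracledb.Connection):
+            pass
+
+        conn = oracledb.connect(
+            user="hr", password=pw, dsn="localhost/orclpdb",
+            conn_class=MyConnection,
+        )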
+ """ + + @functools.wraps(f) + def connect( + dsn: Optional[str] = None, + *, + pool: Optional["pool_module.ConnectionPool"] = None, + pool_alias: Optional[str] = None, + conn_class: Type[Connection] = Connection, + params: Optional[ConnectParams] = None, + **kwargs, + ) -> Connection: + f( + dsn=dsn, + pool=pool, + pool_alias=pool_alias, + conn_class=conn_class, + params=params, + **kwargs, + ) + if not issubclass(conn_class, Connection): + errors._raise_err(errors.ERR_INVALID_CONN_CLASS) + if pool is not None and pool_alias is not None: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="pool", + new_name="pool_alias", + ) + if pool_alias is not None: + pool = pool_module.named_pools.pools.get(pool_alias) + if pool is None: + errors._raise_err( + errors.ERR_NAMED_POOL_MISSING, alias=pool_alias + ) + if pool is not None and not isinstance( + pool, pool_module.ConnectionPool + ): + message = "pool must be an instance of oracledb.ConnectionPool" + raise TypeError(message) + return conn_class(dsn=dsn, pool=pool, params=params, **kwargs) + + return connect + + +@_connection_factory +def connect( + dsn: Optional[str] = None, + *, + pool: Optional["pool_module.ConnectionPool"] = None, + pool_alias: Optional[str] = None, + conn_class: Type[Connection] = Connection, + params: Optional[ConnectParams] = None, + user: Optional[str] = None, + proxy_user: Optional[str] = None, + password: Optional[str] = None, + newpassword: Optional[str] = None, + wallet_password: Optional[str] = None, + access_token: Optional[Union[str, tuple, Callable]] = None, + host: Optional[str] = None, + port: Optional[int] = None, + protocol: Optional[str] = None, + https_proxy: Optional[str] = None, + https_proxy_port: Optional[int] = None, + service_name: Optional[str] = None, + instance_name: Optional[str] = None, + sid: Optional[str] = None, + server_type: Optional[str] = None, + cclass: Optional[str] = None, + purity: Optional[oracledb.Purity] = None, + expire_time: Optional[int] = None, + retry_count: Optional[int] = None, + retry_delay: Optional[int] = None, + tcp_connect_timeout: Optional[float] = None, + ssl_server_dn_match: Optional[bool] = None, + ssl_server_cert_dn: Optional[str] = None, + wallet_location: Optional[str] = None, + events: Optional[bool] = None, + externalauth: Optional[bool] = None, + mode: Optional[oracledb.AuthMode] = None, + disable_oob: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + edition: Optional[str] = None, + tag: Optional[str] = None, + matchanytag: Optional[bool] = None, + config_dir: Optional[str] = None, + appcontext: Optional[list] = None, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + debug_jdwp: Optional[str] = None, + connection_id_prefix: Optional[str] = None, + ssl_context: Optional[Any] = None, + sdu: Optional[int] = None, + pool_boundary: Optional[str] = None, + use_tcp_fast_open: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + program: Optional[str] = None, + machine: Optional[str] = None, + terminal: Optional[str] = None, + osuser: Optional[str] = None, + driver_name: Optional[str] = None, + use_sni: Optional[bool] = None, + thick_mode_dsn_passthrough: Optional[bool] = None, + extra_auth_params: Optional[dict] = None, + pool_name: Optional[str] = None, + handle: Optional[int] = None, +) -> Connection: + """ + Factory function which creates a connection to the database and returns it. 
+ + The ``dsn`` parameter (data source name) can be a string in the format + user/password@connect_string or can simply be the connect string (in + which case authentication credentials such as the username and password + need to be specified separately). See the documentation on connection + strings for more information. + + The ``pool`` parameter is expected to be a pool object. This parameter was + deprecated in python-oracledb 3.0.0. Use :meth:`ConnectionPool.acquire()` + instead since the use of this parameter is the equivalent of calling this + method. + + The ``conn_class`` parameter is expected to be Connection or a subclass of + Connection. + + The ``params`` parameter is expected to be of type ConnectParams and + contains connection parameters that will be used when establishing the + connection. See the documentation on ConnectParams for more information. + If this parameter is not specified, the additional keyword parameters will + be used to create an instance of ConnectParams. If both the ``params`` + parameter and additional keyword parameters are specified, the values in + the keyword parameters have precedence. Note that if a ``dsn`` is also + supplied, then in python-oracledb Thin mode, the values of the parameters + specified (if any) within the ``dsn`` will override the values passed as + additional keyword parameters, which themselves override the values set in + the ``params`` parameter object. + + The following parameters are all optional. A brief description of each + parameter follows: + + - ``user``: the name of the database user to connect to + (default: None) + + - ``proxy_user``: the name of the proxy user to connect to. If this value + is not specified, it will be parsed out of user if user is in the form + "user[proxy_user]" + (default: None) + + - ``password``: the password for the database user + (default: None) + + - ``newpassword``: a new password for the database user. The new password + will take effect immediately upon a successful connection to the database + (default: None) + + - ``wallet_password``: the password to use to decrypt the wallet, if it is + encrypted. This is not the database password. For Oracle Autonomous + Database this is the password created when downloading the wallet. This + value is only used in python-oracledb Thin mode. + (default: None) + + - ``access_token``: a string, or a 2-tuple, or a callable. If it is a + string, it specifies an Entra ID OAuth2 token used for Open Authorization + (OAuth 2.0) token based authentication. If it is a 2-tuple, it specifies + the token and private key strings used for Oracle Cloud Infrastructure + (OCI) Identity and Access Management (IAM) token based authentication. 
+      If it is a callable, it returns either a string or a 2-tuple used for
+      OAuth 2.0 or OCI IAM token based authentication and is useful when the
+      pool needs to expand and create new connections but the current
+      authentication token has expired
+      (default: None)
+
+    - ``host``: the hostname or IP address of the machine hosting the database
+      or the database listener
+      (default: None)
+
+    - ``port``: the port number on which the database listener is listening
+      (default: 1521)
+
+    - ``protocol``: one of the strings "tcp" or "tcps" indicating whether to
+      use unencrypted network traffic or encrypted network traffic (TLS)
+      (default: "tcp")
+
+    - ``https_proxy``: the hostname or IP address of a proxy host to use for
+      tunneling secure connections
+      (default: None)
+
+    - ``https_proxy_port``: the port on which to communicate with the proxy
+      host
+      (default: 0)
+
+    - ``service_name``: the service name of the database
+      (default: None)
+
+    - ``instance_name``: the instance name of the database
+      (default: None)
+
+    - ``sid``: the system identifier (SID) of the database. Note using a
+      service_name instead is recommended
+      (default: None)
+
+    - ``server_type``: the type of server connection that should be
+      established. If specified, it should be one of strings "dedicated",
+      "shared" or "pooled"
+      (default: None)
+
+    - ``cclass``: the connection class to use for Database Resident Connection
+      Pooling (DRCP)
+      (default: None)
+
+    - ``purity``: the connection purity to use for Database Resident Connection
+      Pooling (DRCP)
+      (default: :attr:`oracledb.PURITY_DEFAULT`)
+
+    - ``expire_time``: the number of minutes between the sending of keepalive
+      probes. If this parameter is set to a value greater than zero it enables
+      keepalive
+      (default: 0)
+
+    - ``retry_count``: the number of times that initial connection
+      establishment should be retried before the connection attempt is
+      terminated
+      (default: 0)
+
+    - ``retry_delay``: the number of seconds to wait before retrying to
+      establish a connection
+      (default: 1)
+
+    - ``tcp_connect_timeout``: a float indicating the maximum number of seconds
+      to wait when establishing a connection to the database host
+      (default: 20.0)
+
+    - ``ssl_server_dn_match``: a boolean indicating whether the server
+      certificate distinguished name (DN) should be matched in addition to the
+      regular certificate verification that is performed. Note that if the
+      ssl_server_cert_dn parameter is not provided, host name matching is
+      performed instead
+      (default: True)
+
+    - ``ssl_server_cert_dn``: the distinguished name (DN) which should be
+      matched with the server. This value is ignored if the ssl_server_dn_match
+      parameter is not set to the value True. If specified, this value is used
+      for any verification. Otherwise the hostname will be used
+      (default: None)
+
+    - ``wallet_location``: the directory where the wallet can be found. In
+      python-oracledb Thin mode this must be the directory containing the PEM-
+      encoded wallet file ewallet.pem. In python-oracledb Thick mode this must
+      be the directory containing the file cwallet.sso
+      (default: None)
+
+    - ``events``: a boolean specifying whether events mode should be enabled.
+      This value is only used in python-oracledb Thick mode and is needed for
+      continuous query notification and high availability event notifications
+      (default: False)
+
+    - ``externalauth``: a boolean indicating whether to use external
+      authentication
+      (default: False)
+
+    - ``mode``: the authorization mode to use.
One of the constants + :data:`oracledb.AUTH_MODE_DEFAULT`, :data:`oracledb.AUTH_MODE_PRELIM`, + :data:`oracledb.AUTH_MODE_SYSASM`, :data:`oracledb.AUTH_MODE_SYSBKP`, + :data:`oracledb.AUTH_MODE_SYSDBA`, :data:`oracledb.AUTH_MODE_SYSDGD`, + :data:`oracledb.AUTH_MODE_SYSKMT`, :data:`oracledb.AUTH_MODE_SYSOPER`, or + :data:`oracledb.AUTH_MODE_SYSRAC` + (default: :attr:`oracledb.AUTH_MODE_DEFAULT`) + + - ``disable_oob``: a boolean indicating whether out-of-band breaks should + be disabled. This value is only used in python-oracledb Thin mode. It has + no effect on Windows which does not support this functionality + (default: False) + + - ``stmtcachesize``: the size of the statement cache + (default: :attr:`oracledb.defaults.stmtcachesize + `) + + - ``edition``: edition to use for the connection. This parameter cannot be + used simultaneously with the cclass parameter + (default: None) + + - ``tag``: identifies the type of connection that should be returned from a + pool. This value is only used in python-oracledb Thick mode + (default: None) + + - ``matchanytag``: a boolean specifying whether any tag can be used when + acquiring a connection from the pool. This value is only used in python- + oracledb Thick mode + (default: False) + + - ``config_dir``: a directory in which the optional tnsnames.ora + configuration file is located. This value is only used in python-oracledb + Thin mode. For python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()` + (default: :attr:`oracledb.defaults.config_dir + `) + + - ``appcontext``: application context used by the connection. It should be + a list of 3-tuples (namespace, name, value) and each entry in the tuple + should be a string + (default: None) + + - ``shardingkey``: a list of strings, numbers, bytes or dates that identify + the database shard to connect to. This value is only used in python- + oracledb Thick mode + (default: None) + + - ``supershardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + (default: None) + + - ``debug_jdwp``: a string with the format "host=;port=" that + specifies the host and port of the PL/SQL debugger. This value is only + used in python-oracledb Thin mode. For python-oracledb Thick mode set + the ORA_DEBUG_JDWP environment variable + (default: None) + + - ``connection_id_prefix``: an application specific prefix that is added to + the connection identifier used for tracing + (default: None) + + - ``ssl_context``: an SSLContext object used for connecting to the database + using TLS. This SSL context will be modified to include the private key + or any certificates found in a separately supplied wallet. This parameter + should only be specified if the default SSLContext object cannot be used + (default: None) + + - ``sdu``: the requested size of the Session Data Unit (SDU), in bytes. The + value tunes internal buffers used for communication to the database. + Bigger values can increase throughput for large queries or bulk data + loads, but at the cost of higher memory use. 
+      The SDU size that will actually be used is negotiated down to the lower
+      of this value and the database network SDU configuration value
+      (default: 8192)
+
+    - ``pool_boundary``: one of the values "statement" or "transaction"
+      indicating when pooled DRCP connections can be returned to the pool. This
+      requires the use of DRCP with Oracle Database 23.4 or higher
+      (default: None)
+
+    - ``use_tcp_fast_open``: a boolean indicating whether to use TCP fast open.
+      This is an Oracle Autonomous Database Serverless (ADB-S) specific
+      property for clients connecting from within OCI Cloud network. Please
+      refer to the ADB-S documentation for more information
+      (default: False)
+
+    - ``ssl_version``: one of the values ssl.TLSVersion.TLSv1_2 or
+      ssl.TLSVersion.TLSv1_3 indicating which TLS version to use
+      (default: None)
+
+    - ``program``: a string recorded by Oracle Database as the program from
+      which the connection originates
+      (default: :attr:`oracledb.defaults.program
+      `)
+
+    - ``machine``: a string recorded by Oracle Database as the name of the
+      machine from which the connection originates
+      (default: :attr:`oracledb.defaults.machine
+      `)
+
+    - ``terminal``: a string recorded by Oracle Database as the terminal
+      identifier from which the connection originates
+      (default: :attr:`oracledb.defaults.terminal
+      `)
+
+    - ``osuser``: a string recorded by Oracle Database as the operating system
+      user who originated the connection
+      (default: :attr:`oracledb.defaults.osuser
+      `)
+
+    - ``driver_name``: a string recorded by Oracle Database as the name of the
+      driver which originated the connection
+      (default: :attr:`oracledb.defaults.driver_name
+      `)
+
+    - ``use_sni``: a boolean indicating whether to use the TLS SNI extension to
+      bypass the second TLS negotiation that would otherwise be required
+      (default: False)
+
+    - ``thick_mode_dsn_passthrough``: a boolean indicating whether to pass the
+      connect string to the Oracle Client libraries unchanged without parsing
+      by the driver. Setting this to False makes python-oracledb Thick and Thin
+      mode applications behave similarly regarding connection string parameter
+      handling and locating any optional tnsnames.ora configuration file
+      (default: :attr:`oracledb.defaults.thick_mode_dsn_passthrough
+      `)
+
+    - ``extra_auth_params``: a dictionary containing configuration parameters
+      necessary for Oracle Database authentication using plugins, such as the
+      Azure and OCI cloud-native authentication plugins
+      (default: None)
+
+    - ``pool_name``: the name of the DRCP pool when using multi-pool DRCP with
+      Oracle Database 23.4, or higher
+      (default: None)
+
+    - ``handle``: an integer representing a pointer to a valid service context
+      handle. This value is only used in python-oracledb Thick mode. It should
+      be used with extreme caution
+      (default: 0)
+    """
+    pass
+
+
+class AsyncConnection(BaseConnection):
+
+    def __init__(
+        self,
+        dsn: str,
+        pool: pool_module.AsyncConnectionPool,
+        params: ConnectParams,
+        kwargs: dict,
+    ) -> None:
+        """
+        Constructor for creating an asynchronous connection to the database.
+        """
+        super().__init__()
+        self._pool = pool
+        self._connect_coroutine = self._connect(dsn, pool, params, kwargs)
+
+    def __await__(self):
+        coroutine = self._connect_coroutine
+        self._connect_coroutine = None
+        return coroutine.__await__()
+
+    async def __aenter__(self):
+        """
+        The entry point for the asynchronous connection as a context manager.
+        It returns itself.
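+
+        For example, an illustrative sketch (the credentials and connect
+        string are placeholders, and ``oracledb`` is assumed to be imported)::
+
+            async with oracledb.connect_async(
+                user="hr", password=pw, dsn="localhost/orclpdb"
+            ) as conn:
+                print(await conn.fetchall("select user from dual"))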
+ """ + if self._connect_coroutine is not None: + await self._connect_coroutine + else: + self._verify_connected() + return self + + async def __aexit__(self, *exc_info): + """ + The exit point for the asynchronous connection as a context manager. + This will close the connection and roll back any uncommitted + transaction. + """ + if self._impl is not None: + await self._close() + + async def _close(self, in_del=False): + """ + Closes the connection and makes it unusable for further operations. An + Error exception will be raised if any operation is attempted with this + connection after this method completes successfully. + """ + if self._pool is not None: + pool_impl = self._pool._impl + if pool_impl is not None: + await pool_impl.return_connection(self._impl, in_del) + else: + await self._impl.close(in_del) + self._impl = None + + async def _connect(self, dsn, pool, params, kwargs): + """ + Internal method for establishing a connection to the database using + asyncio. + """ + + # mandate that thin mode is required; with asyncio, only thin mode is + # supported and only one thread is executing, so the manager can be + # manipulated directly + driver_mode.manager.thin_mode = True + + # determine which connection parameters to use + if params is None: + params_impl = base_impl.ConnectParamsImpl() + elif not isinstance(params, ConnectParams): + errors._raise_err(errors.ERR_INVALID_CONNECT_PARAMS) + else: + params_impl = params._impl.copy() + dsn = params_impl.process_args(dsn, kwargs, thin=True) + + # see if connection is being acquired from a pool + if pool is None: + pool_impl = None + elif not isinstance(pool, pool_module.AsyncConnectionPool): + message = ( + "pool must be an instance of oracledb.AsyncConnectionPool" + ) + raise TypeError(message) + else: + pool._verify_open() + pool_impl = pool._impl + + # create implementation object + if pool is not None: + impl = await pool_impl.acquire(params_impl) + else: + impl = thin_impl.AsyncThinConnImpl(dsn, params_impl) + await impl.connect(params_impl) + self._impl = impl + + # invoke callback, if applicable + if ( + impl.invoke_session_callback + and pool is not None + and pool.session_callback is not None + and callable(pool.session_callback) + ): + await pool.session_callback(self, params_impl.tag) + impl.invoke_session_callback = False + + return self + + def _create_queue(self, impl): + """ + Returns a queue object that the user can use to dequeue and enqueue + messages. + """ + return AsyncQueue._from_impl(self, impl) + + def _verify_can_execute( + self, parameters: Any, keyword_parameters: Any + ) -> Any: + """ + Verifies that the connection can be used to execute + Verifies that the connection is connected to the database. If it is + not, an exception is raised. + """ + self._verify_connected() + if keyword_parameters: + if parameters: + errors._raise_err(errors.ERR_ARGS_AND_KEYWORD_ARGS) + return keyword_parameters + elif parameters is not None and not isinstance( + parameters, (list, tuple, dict) + ): + errors._raise_err(errors.ERR_WRONG_EXECUTE_PARAMETERS_TYPE) + return parameters + + async def begin_sessionless_transaction( + self, + transaction_id: Optional[Union[str, bytes]] = None, + timeout: int = 60, + defer_round_trip: bool = False, + ) -> bytes: + """ + Begins a new sessionless transaction. This method returns the + transaction identifier specified by the user or generated by + python-oracledb. + + The ``transaction_id`` parameter should be of type string or bytes. 
If
+ specified, it represents a unique identifier for the transaction. If a
+ string is passed, then it will be UTF-8 encoded to bytes. If this value
+ is not specified, then python-oracledb generates a random
+ `universally-unique identifier (UUID) `__ value when this function is called. An example is
+ "36b8f84d-df4e-4d49-b662-bcde71a8764f". The user-chosen value cannot
+ exceed 64 bytes in length.
+
+ The ``timeout`` parameter is the number of seconds that this
+ transaction can stay suspended when
+ :meth:`suspend_sessionless_transaction()` is later called, or if the
+ transaction is automatically suspended when the ``suspend_on_success``
+ parameter is set to *True* in :meth:`AsyncCursor.execute()` or
+ :meth:`AsyncCursor.executemany()`. The default value is *60* seconds.
+ If a transaction is not resumed within this specified duration, the
+ transaction will be rolled back.
+
+ The ``defer_round_trip`` parameter is a boolean that determines whether
+ the request to start a transaction is to be sent immediately or with
+ the next database operation. If set to *False*, the request is sent
+ immediately. If set to *True*, the request is included with the next
+ database operation on the connection. The default value is *False*.
+ """
+ self._verify_connected()
+ normalized_txnid = normalize_sessionless_transaction_id(transaction_id)
+
+ if not isinstance(timeout, int) or timeout <= 0:
+ raise TypeError("timeout must be a positive integer")
+
+ await self._impl.begin_sessionless_transaction(
+ normalized_txnid, timeout, defer_round_trip
+ )
+ return normalized_txnid
+
+ async def callfunc(
+ self,
+ name: str,
+ return_type: Any,
+ parameters: Optional[Union[list, tuple]] = None,
+ keyword_parameters: Optional[dict] = None,
+ ) -> Any:
+ """
+ Calls a PL/SQL function with the given name.
+
+ This is a shortcut for calling :meth:`cursor()`,
+ :meth:`AsyncCursor.callfunc()`, and then :meth:`AsyncCursor.close()`.
+ """
+ with self.cursor() as cursor:
+ return await cursor.callfunc(
+ name, return_type, parameters, keyword_parameters
+ )
+
+ async def callproc(
+ self,
+ name: str,
+ parameters: Optional[Union[list, tuple]] = None,
+ keyword_parameters: Optional[dict] = None,
+ ) -> list:
+ """
+ Calls a PL/SQL procedure with the given name.
+
+ This is a shortcut for calling :meth:`cursor()`,
+ :meth:`AsyncCursor.callproc()`, and then :meth:`AsyncCursor.close()`.
+ """
+ with self.cursor() as cursor:
+ return await cursor.callproc(name, parameters, keyword_parameters)
+
+ async def changepassword(
+ self, old_password: str, new_password: str
+ ) -> None:
+ """
+ Changes the password for the user to which the connection is connected.
+ """
+ self._verify_connected()
+ await self._impl.change_password(old_password, new_password)
+
+ async def close(self) -> None:
+ """
+ Closes the connection.
+ """
+ self._verify_connected()
+ await self._close()
+
+ async def commit(self) -> None:
+ """
+ Commits any pending transaction to the database.
+ """
+ self._verify_connected()
+ await self._impl.commit()
+
+ async def createlob(
+ self, lob_type: DbType, data: Optional[Union[str, bytes]] = None
+ ) -> AsyncLOB:
+ """
+ Creates and returns a new temporary LOB of the specified type.
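+
+ For example, a temporary CLOB might be created and populated in one
+ call (an illustrative sketch only; any open asynchronous connection
+ could be used)::
+
+     lob = await connection.createlob(oracledb.DB_TYPE_CLOB, "some text")
+     print(await lob.read())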
+ """ + self._verify_connected() + if lob_type not in (DB_TYPE_CLOB, DB_TYPE_NCLOB, DB_TYPE_BLOB): + message = ( + "parameter should be one of oracledb.DB_TYPE_CLOB, " + "oracledb.DB_TYPE_BLOB or oracledb.DB_TYPE_NCLOB" + ) + raise TypeError(message) + impl = await self._impl.create_temp_lob_impl(lob_type) + lob = AsyncLOB._from_impl(impl) + if data: + await lob.write(data) + return lob + + def cursor(self, scrollable: bool = False) -> AsyncCursor: + """ + Returns an :ref:`AsyncCursor object ` associated with + the connection. + """ + self._verify_connected() + return AsyncCursor(self, scrollable) + + async def execute( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + ) -> None: + """ + Executes a statement against the database. + + This is a shortcut for calling :meth:`cursor()`, + :meth:`AsyncCursor.execute()`, and then :meth:`AsyncCursor.close()` + """ + with self.cursor() as cursor: + await cursor.execute(statement, parameters) + + async def executemany( + self, statement: Union[str, None], parameters: Any + ) -> None: + """ + Executes a SQL statement once using all bind value mappings or + sequences found in the sequence parameters. This can be used to insert, + update, or delete multiple rows in a table with a single + python-oracledb call. It can also invoke a PL/SQL procedure multiple + times. + + The ``parameters`` parameter can be a list of tuples, where each tuple + item maps to one bind variable placeholder in ``statement``. It can + also be a list of dictionaries, where the keys match the bind variable + placeholder names in ``statement``. If there are no bind values, or + values have previously been bound, the ``parameters`` value can be an + integer specifying the number of iterations. + + This is a shortcut for calling :meth:`cursor()`, + :meth:`AsyncCursor.executemany()`, and then + :meth:`AsyncCursor.close()`. + """ + with self.cursor() as cursor: + await cursor.executemany(statement, parameters) + + async def fetchall( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + arraysize: Optional[int] = None, + rowfactory: Optional[Callable] = None, + *, + fetch_lobs: Optional[bool] = None, + fetch_decimals: Optional[bool] = None, + ) -> list: + """ + Executes a query and returns all of the rows. + + The default value for ``arraysize`` is + :attr:`oracledb.defaults.arraysize `. + + Internally, this method's :attr:`AsyncCursor.prefetchrows` size is set + to the value of the explicit or default ``arraysize`` parameter value. + + This is a shortcut for calling :meth:`cursor()`, + :meth:`AsyncCursor.fetchall()`, and then :meth:`AsyncCursor.close()`. + """ + with self.cursor() as cursor: + if arraysize is not None: + cursor.arraysize = arraysize + cursor.prefetchrows = cursor.arraysize + await cursor.execute( + statement, + parameters, + fetch_lobs=fetch_lobs, + fetch_decimals=fetch_decimals, + ) + cursor.rowfactory = rowfactory + return await cursor.fetchall() + + async def direct_path_load( + self, + schema_name: str, + table_name: str, + column_names: list[str], + data: Any, + *, + batch_size: int = 2**32 - 1, + ) -> None: + """ + Load data into Oracle Database using the Direct Path Load interface. + It is available only in python-oracledb Thin mode. + + The ``data`` parameter can be a list of sequences, a DataFrame, or a + third-party DataFrame instance that supports the Apache Arrow PyCapsule + Interface. 
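+
+ For example, a small set of rows might be loaded from a list of
+ tuples (an illustrative sketch; the schema, table, and columns shown
+ are assumed to already exist)::
+
+     rows = [(1, "Alpha"), (2, "Beta")]
+     await connection.direct_path_load(
+         schema_name="HR",
+         table_name="DEMO_TAB",
+         column_names=["ID", "NAME"],
+         data=rows,
+     )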
+ + The ``batch_size`` parameter is used to split large data sets into + smaller pieces for sending to the database. It is the number of records + in each batch. This parameter can be used to tune performance. + """ + self._verify_connected() + await self._impl.direct_path_load( + schema_name, table_name, column_names, data, batch_size + ) + + async def fetch_df_all( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + arraysize: Optional[int] = None, + *, + fetch_decimals: Optional[bool] = None, + requested_schema: Optional[Any] = None, + ) -> DataFrame: + """ + Fetches all rows of the SQL query ``statement``, returning them in a + :ref:`DataFrame ` object. An empty DataFrame is + returned if there are no rows available. + + The ``parameters`` parameter can be a list of tuples, where each tuple + item maps to one :ref:`bind variable placeholder ` in + ``statement``. It can also be a list of dictionaries, where the keys + match the bind variable placeholder names in ``statement``. + + The ``arraysize`` parameter can be specified to tune performance of + fetching data across the network. It defaults to + :attr:`oracledb.defaults.arraysize `. Internally, + the ``fetch_df_all()``'s :attr:`Cursor.prefetchrows` size is always set + to the value of the explicit or default ``arraysize`` parameter value. + + The ``fetch_decimals`` parameter specifies whether to return decimal + values when fetching columns of type ``NUMBER`` that are capable of + being represented in Apache Arrow Decimal128 format. The default value + is :data:`oracledb.defaults.fetch_decimals `. + + The ``requested_schema`` parameter specifies an object that implements + the Apache Arrow PyCapsule schema interface. The DataFrame returned by + ``fetch_df_all()`` will have the data types and names of the schema. + """ + cursor = self.cursor() + cursor._impl.fetching_arrow = True + if requested_schema is not None: + cursor._impl.schema_impl = ArrowSchemaImpl.from_arrow_schema( + requested_schema + ) + if arraysize is not None: + cursor.arraysize = arraysize + cursor.prefetchrows = cursor.arraysize + await cursor.execute( + statement, + parameters, + fetch_decimals=fetch_decimals, + ) + return await cursor._impl.fetch_df_all(cursor) + + async def fetch_df_batches( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + size: Optional[int] = None, + *, + fetch_decimals: Optional[bool] = None, + requested_schema: Optional[Any] = None, + ) -> Iterator[DataFrame]: + """ + This returns an iterator yielding the next ``size`` rows of the SQL + query ``statement`` in each iteration as a :ref:`DataFrame + ` object. An empty DataFrame is returned if there + are no rows available. + + The ``parameters`` parameter can be a list of tuples, where each tuple + item maps to one :ref:`bind variable placeholder ` in + ``statement``. It can also be a list of dictionaries, where the keys + match the bind variable placeholder names in ``statement``. + + The ``size`` parameter controls the number of records fetched in each + batch. It defaults to + :attr:`oracledb.defaults.arraysize `. Internally, + the ``fetch_df_batches()``'s :attr:`Cursor.arraysize` and + :attr:`Cursor.prefetchrows` sizes are always set to the value of the + explicit or default ``size`` parameter value. + + The ``fetch_decimals`` parameter specifies whether to return + decimal values when fetching columns of type ``NUMBER`` that are + capable of being represented in Arrow Decimal128 format. 
The default + value is + :data:`oracledb.defaults.fetch_decimals `. + + The ``requested_schema`` parameter specifies an object that implements + the Apache Arrow PyCapsule schema interface. The DataFrame returned by + ``fetch_df_all()`` will have the data types and names of the schema. + """ + cursor = self.cursor() + cursor._impl.fetching_arrow = True + if requested_schema is not None: + cursor._impl.schema_impl = ArrowSchemaImpl.from_arrow_schema( + requested_schema + ) + if size is not None: + cursor.arraysize = size + cursor.prefetchrows = cursor.arraysize + await cursor.execute( + statement, + parameters, + fetch_decimals=fetch_decimals, + ) + if size is None: + yield await cursor._impl.fetch_df_all(cursor) + else: + async for df in cursor._impl.fetch_df_batches(cursor, size): + yield df + + async def fetchmany( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + num_rows: Optional[int] = None, + rowfactory: Optional[Callable] = None, + *, + fetch_lobs: Optional[bool] = None, + fetch_decimals: Optional[bool] = None, + ) -> list: + """ + Executes a query and returns up to the specified number of rows. + + The default value for ``num_rows`` is the value of + :attr:`oracledb.defaults.arraysize `. + + Internally, this method's :attr:`AsyncCursor.prefetchrows` size is set + to the value of the explicit or default ``num_rows`` parameter, + allowing all rows to be fetched in one :ref:`round-trip ` + + Since only one fetch is performed for a query, consider adding a + ``FETCH NEXT`` clause to the statement to prevent the database + processing rows that will never be fetched, see :ref:`rowlimit`. + + This a shortcut for calling :meth:`cursor()`, + :meth:`AsyncCursor.fetchmany()`, and then :meth:`AsyncCursor.close()`. + """ + with self.cursor() as cursor: + if num_rows is None: + num_rows = cursor.arraysize + elif num_rows <= 0: + return [] + cursor.arraysize = cursor.prefetchrows = num_rows + await cursor.execute( + statement, + parameters, + fetch_lobs=fetch_lobs, + fetch_decimals=fetch_decimals, + ) + cursor.rowfactory = rowfactory + return await cursor.fetchmany(num_rows) + + async def fetchone( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + rowfactory: Optional[Callable] = None, + *, + fetch_lobs: Optional[bool] = None, + fetch_decimals: Optional[bool] = None, + ) -> Any: + """ + Executes a query and returns the first row of the result set if one + exists (or *None* if no rows exist). + + Internally, this method's :attr:`Cursor.prefetchrows` and + :attr:`Cursor.arraysize` sizes will be set to *1*. + + Since only one fetch is performed for a query, consider adding a + ``WHERE`` condition or using a ``FETCH NEXT`` clause in the statement + to prevent the database processing rows that will never be fetched, see + :ref:`rowlimit`. + + This a shortcut for calling :meth:`cursor()`, + :meth:`AsyncCursor.fetchone()`, and then :meth:`AsyncCursor.close()`. + """ + with self.cursor() as cursor: + cursor.prefetchrows = cursor.arraysize = 1 + await cursor.execute( + statement, + parameters, + fetch_lobs=fetch_lobs, + fetch_decimals=fetch_decimals, + ) + cursor.rowfactory = rowfactory + return await cursor.fetchone() + + async def gettype(self, name: str) -> DbObjectType: + """ + Returns a type object given its name. This can then be used to create + objects which can be bound to cursors created by this connection. 
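+
+ For example (an illustrative sketch assuming a user-defined object
+ type called UDT_DEMO exists in the schema)::
+
+     obj_type = await connection.gettype("UDT_DEMO")
+     obj = obj_type.newobject()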
+ """ + self._verify_connected() + obj_type_impl = await self._impl.get_type(self, name) + return DbObjectType._from_impl(obj_type_impl) + + async def ping(self) -> None: + """ + Pings the database to verify if the connection is valid. + """ + self._verify_connected() + await self._impl.ping() + + async def resume_sessionless_transaction( + self, + transaction_id: Union[str, bytes], + timeout: int = 60, + defer_round_trip: bool = False, + ) -> bytes: + """ + Resumes an existing sessionless transaction using the specified + transaction identifier. This method returns the transaction identifier + used to resume the sessionless transaction. + + The ``transaction_id`` parameter should be a string or bytes value that + uniquely identifies an existing sessionless transaction that is to be + resumed. + + The ``timeout`` parameter is the number of seconds that the current + connection waits to resume a transaction if another connection is using + it. When ``defer_round_trip`` is set to *False*, the wait happens in + the ``resume_sessionless_transaction()`` call itself, and the function + blocks until the transaction becomes available or the timeout expires. + When ``defer_round_trip`` is set to *True*, the resume is deferred and + the wait occurs at the time of the next database operation instead. At + the start of the wait period, if the transaction is not in use by any + other connection, the resume happens immediately. If the transaction + remains in use by the other connection after the timeout period, the + error `ORA-25351 + `__ is raised. If + another connection completes the transaction, the error `ORA-24756 + `__ is raised. + These error messages are only thrown for non-RAC instances. For + information on using Oracle RAC, see :ref:`Sessionless Transactions + with Oracle RAC `. The default value is *60* + seconds. + + The ``defer_round_trip`` parameter is a boolean that determines whether + the request to resume a transaction is to be sent immediately or with + the next database operation. If set to *False*, the request is sent + immediately. If set to *True*, the request is included with the next + database operation on the connection. The default value is *False*. + """ + self._verify_connected() + if transaction_id is None: + raise ValueError("transaction_id is required for resuming") + + normalized_txnid = normalize_sessionless_transaction_id(transaction_id) + + if not (isinstance(timeout, int) and timeout >= 0): + raise TypeError("timeout must be a non-negative integer") + + await self._impl.resume_sessionless_transaction( + normalized_txnid, timeout, defer_round_trip + ) + return normalized_txnid + + async def rollback(self) -> None: + """ + Rolls back any pending transaction. + """ + self._verify_connected() + await self._impl.rollback() + + async def run_pipeline( + self, + pipeline: Pipeline, + continue_on_error: bool = False, + ) -> list[PipelineOpResult]: + """ + Runs all of the operations in the pipeline and returns a list of + PipelineOpResult, each entry corresponding to an operation executed in + the pipeline. + + The ``continue_on_error`` parameter determines whether operations + should continue to run after an error has occurred. If this parameter + is set to *True*, then the :attr:`PipelineOpResult.error` attribute + will be populated with an :ref:`_Error ` instance which + identifies the error that occurred. If this parameter is set to + *False*, then an exception will be raised as soon as an error is + detected and all subsequent operations will be terminated. 
The default + value is *False*. + """ + self._verify_connected() + results = [op._create_result() for op in pipeline.operations] + if self._impl.supports_pipelining() and len(results) > 1: + await self._impl.run_pipeline_with_pipelining( + self, results, continue_on_error + ) + else: + await self._impl.run_pipeline_without_pipelining( + self, results, continue_on_error + ) + return results + + async def suspend_sessionless_transaction(self) -> None: + """ + Suspends the currently active sessionless transaction immediately. + + This detaches the transaction from the connection, allowing it to be + resumed later with the transaction identifier that was specified during + creation of the sessionless transaction. The ``timeout`` previously + passed to :meth:`AsyncConnection.begin_sessionless_transaction()` + determines how long the transaction can stay suspended before it is + automatically rolled back. + """ + self._verify_connected() + await self._impl.suspend_sessionless_transaction() + + async def tpc_begin( + self, xid: Xid, flags: int = oracledb.TPC_BEGIN_NEW, timeout: int = 0 + ) -> None: + """ + Begins a Two-Phase Commit (TPC) on a global transaction using the + specified transaction identifier (xid). + + The ``xid`` parameter should be an object returned by the + :meth:`xid()` method. + + The ``flags`` parameter is one of the constants + :data:`oracledb.TPC_BEGIN_JOIN`, :data:`oracledb.TPC_BEGIN_NEW`, + :data:`oracledb.TPC_BEGIN_PROMOTE`, or + :data:`oracledb.TPC_BEGIN_RESUME`. The default is + :data:`oracledb.TPC_BEGIN_NEW`. + + The ``timeout`` parameter is the number of seconds to wait for a + transaction to become available for resumption when + :data:`~oracledb.TPC_BEGIN_RESUME` is specified in the ``flags`` + parameter. When :data:`~oracledb.TPC_BEGIN_NEW` is specified in the + ``flags`` parameter, the ``timeout`` parameter indicates the number of + seconds the transaction can be inactive before it is automatically + terminated by the system. A transaction is inactive between the time it + is detached with :meth:`AsyncConnection.tpc_end()` and the time it is + resumed with :meth:`AsyncConnection.tpc_begin()`.The default is *0* + seconds. + """ + self._verify_connected() + self._verify_xid(xid) + if flags not in ( + oracledb.TPC_BEGIN_NEW, + oracledb.TPC_BEGIN_JOIN, + oracledb.TPC_BEGIN_RESUME, + oracledb.TPC_BEGIN_PROMOTE, + ): + errors._raise_err(errors.ERR_INVALID_TPC_BEGIN_FLAGS) + await self._impl.tpc_begin(xid, flags, timeout) + + async def tpc_commit( + self, xid: Optional[Xid] = None, one_phase: bool = False + ) -> None: + """ + Commits a global transaction. When called with no arguments, this + method commits a transaction previously prepared with + :meth:`~AsyncConnection.tpc_begin()` and optionally prepared with + :meth:`~AsyncConnection.tpc_prepare()`. If + :meth:`~AsyncConnection.tpc_prepare()` is not called, a single phase + commit is performed. A transaction manager may choose to do this if + only a single resource is participating in the global transaction. + + If an ``xid`` parameter is passed, then an object should be returned by + the :meth:`~Connection.xid()` function. This form should be called + outside of a transaction and is intended for use in recovery. + + The ``one_phase`` parameter is a boolean identifying whether to perform + a one-phase or two-phase commit. If ``one_phase`` parameter is *True*, + a single-phase commit is performed. The default value is *False*. This + parameter is only examined if a value is provided for the ``xid`` + parameter. 
Otherwise, the driver already knows whether
+ :meth:`tpc_prepare()` was called for the transaction and whether a
+ one-phase or two-phase commit is required.
+ """
+ self._verify_connected()
+ if xid is not None:
+ self._verify_xid(xid)
+ await self._impl.tpc_commit(xid, one_phase)
+
+ async def tpc_end(
+ self, xid: Optional[Xid] = None, flags: int = oracledb.TPC_END_NORMAL
+ ) -> None:
+ """
+ Ends or suspends work on a global transaction. This function is only
+ intended for use by transaction managers.
+
+ If an ``xid`` parameter is passed, it should be an object returned by
+ the :meth:`~Connection.xid()` function. If no xid parameter is passed,
+ then the transaction identifier used by the previous
+ :meth:`~Connection.tpc_begin()` is used.
+
+ The ``flags`` parameter is one of the constants
+ :data:`oracledb.TPC_END_NORMAL` or :data:`oracledb.TPC_END_SUSPEND`.
+ The default is :data:`oracledb.TPC_END_NORMAL`.
+
+ If the flag is :data:`oracledb.TPC_END_SUSPEND` then the transaction
+ may be resumed later by calling :meth:`AsyncConnection.tpc_begin()`
+ with the flag :data:`oracledb.TPC_BEGIN_RESUME`.
+ """
+ self._verify_connected()
+ if xid is not None:
+ self._verify_xid(xid)
+ if flags not in (oracledb.TPC_END_NORMAL, oracledb.TPC_END_SUSPEND):
+ errors._raise_err(errors.ERR_INVALID_TPC_END_FLAGS)
+ await self._impl.tpc_end(xid, flags)
+
+ async def tpc_forget(self, xid: Xid) -> None:
+ """
+ Causes the database to forget a heuristically completed TPC
+ transaction. This function is only intended to be called by
+ transaction managers.
+
+ The ``xid`` parameter is mandatory and should be an object returned by
+ the :meth:`xid()` function.
+ """
+ self._verify_connected()
+ self._verify_xid(xid)
+ await self._impl.tpc_forget(xid)
+
+ async def tpc_prepare(self, xid: Optional[Xid] = None) -> bool:
+ """
+ Prepares a two-phase transaction for commit. After this function is
+ called, no further activity should take place on this connection until
+ either :meth:`tpc_commit()` or :meth:`tpc_rollback()` have been called.
+
+ Returns a boolean indicating whether a commit is needed or not. If you
+ attempt to commit when not needed, then it results in the error
+ ``ORA-24756: transaction does not exist``.
+
+ If an ``xid`` parameter is passed, it should be an object returned by
+ the :meth:`xid()` function. If an ``xid`` parameter is not passed, then
+ the transaction identifier used by the previous :meth:`tpc_begin()` is
+ used.
+ """
+ self._verify_connected()
+ if xid is not None:
+ self._verify_xid(xid)
+ return await self._impl.tpc_prepare(xid)
+
+ async def tpc_recover(self) -> list:
+ """
+ Returns a list of pending transaction identifiers that require
+ recovery. Objects of type ``Xid`` (as returned by the
+ :meth:`~Connection.xid()` function) are returned and these can be
+ passed to :meth:`tpc_commit()` or :meth:`tpc_rollback()` as needed.
+
+ This function queries the view ``DBA_PENDING_TRANSACTIONS`` and
+ requires ``SELECT`` privilege on that view.
+ """
+ with self.cursor() as cursor:
+ await cursor.execute(
+ """
+ select
+ formatid,
+ globalid,
+ branchid
+ from dba_pending_transactions"""
+ )
+ cursor.rowfactory = Xid
+ return await cursor.fetchall()
+
+ async def tpc_rollback(self, xid: Optional[Xid] = None) -> None:
+ """
+ Rolls back a global transaction.
+
+ If an ``xid`` parameter is not passed, then it rolls back the
+ transaction that was previously started with
+ :meth:`~AsyncConnection.tpc_begin()`.
+ + If an ``xid`` parameter is passed, then an object should be returned by + :meth:`~Connection.xid()` and the specified transaction is rolled back. + This form should be called outside of a transaction and is intended for + use in recovery. + """ + self._verify_connected() + if xid is not None: + self._verify_xid(xid) + await self._impl.tpc_rollback(xid) + + +def _async_connection_factory( + f: Callable[..., AsyncConnection], +) -> Callable[..., AsyncConnection]: + """ + Decorator which checks the validity of the supplied keyword parameters by + calling the original function (which does nothing), then creates and + returns an instance of the requested AsyncConnection class. + """ + + @functools.wraps(f) + def connect_async( + dsn: Optional[str] = None, + *, + pool: Optional["pool_module.AsyncConnectionPool"] = None, + pool_alias: Optional[str] = None, + conn_class: Type[AsyncConnection] = AsyncConnection, + params: Optional[ConnectParams] = None, + **kwargs, + ) -> AsyncConnection: + # check arguments + f( + dsn=dsn, + pool=pool, + pool_alias=pool_alias, + conn_class=conn_class, + params=params, + **kwargs, + ) + if not issubclass(conn_class, AsyncConnection): + errors._raise_err(errors.ERR_INVALID_CONN_CLASS) + + if pool is not None and pool_alias is not None: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="pool", + new_name="pool_alias", + ) + if pool_alias is not None: + pool = pool_module.named_pools.pools.get(pool_alias) + if pool is None: + errors._raise_err( + errors.ERR_NAMED_POOL_MISSING, alias=pool_alias + ) + if pool is not None and not isinstance( + pool, pool_module.AsyncConnectionPool + ): + message = ( + "pool must be an instance of oracledb.AsyncConnectionPool" + ) + raise TypeError(message) + if params is not None and not isinstance(params, ConnectParams): + errors._raise_err(errors.ERR_INVALID_CONNECT_PARAMS) + + # build connection class and call the implementation connect to + # actually establish the connection + oracledb.enable_thin_mode() + return conn_class(dsn, pool, params, kwargs) + + return connect_async + + +@_async_connection_factory +def connect_async( + dsn: Optional[str] = None, + *, + pool: Optional["pool_module.AsyncConnectionPool"] = None, + pool_alias: Optional[str] = None, + conn_class: Type[AsyncConnection] = AsyncConnection, + params: Optional[ConnectParams] = None, + user: Optional[str] = None, + proxy_user: Optional[str] = None, + password: Optional[str] = None, + newpassword: Optional[str] = None, + wallet_password: Optional[str] = None, + access_token: Optional[Union[str, tuple, Callable]] = None, + host: Optional[str] = None, + port: Optional[int] = None, + protocol: Optional[str] = None, + https_proxy: Optional[str] = None, + https_proxy_port: Optional[int] = None, + service_name: Optional[str] = None, + instance_name: Optional[str] = None, + sid: Optional[str] = None, + server_type: Optional[str] = None, + cclass: Optional[str] = None, + purity: Optional[oracledb.Purity] = None, + expire_time: Optional[int] = None, + retry_count: Optional[int] = None, + retry_delay: Optional[int] = None, + tcp_connect_timeout: Optional[float] = None, + ssl_server_dn_match: Optional[bool] = None, + ssl_server_cert_dn: Optional[str] = None, + wallet_location: Optional[str] = None, + events: Optional[bool] = None, + externalauth: Optional[bool] = None, + mode: Optional[oracledb.AuthMode] = None, + disable_oob: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + edition: Optional[str] = None, + tag: Optional[str] = None, + 
matchanytag: Optional[bool] = None, + config_dir: Optional[str] = None, + appcontext: Optional[list] = None, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + debug_jdwp: Optional[str] = None, + connection_id_prefix: Optional[str] = None, + ssl_context: Optional[Any] = None, + sdu: Optional[int] = None, + pool_boundary: Optional[str] = None, + use_tcp_fast_open: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + program: Optional[str] = None, + machine: Optional[str] = None, + terminal: Optional[str] = None, + osuser: Optional[str] = None, + driver_name: Optional[str] = None, + use_sni: Optional[bool] = None, + thick_mode_dsn_passthrough: Optional[bool] = None, + extra_auth_params: Optional[dict] = None, + pool_name: Optional[str] = None, + handle: Optional[int] = None, +) -> AsyncConnection: + """ + Factory function which creates a connection to the database and returns it. + + The ``dsn`` parameter (data source name) can be a string in the format + user/password@connect_string or can simply be the connect string (in + which case authentication credentials such as the username and password + need to be specified separately). See the documentation on connection + strings for more information. + + The ``pool`` parameter is expected to be a pool object. This parameter was + deprecated in python-oracledb 3.0.0. Use :meth:`ConnectionPool.acquire()` + instead since the use of this parameter is the equivalent of calling this + method. + + The ``conn_class`` parameter is expected to be AsyncConnection or a + subclass of AsyncConnection. + + The ``params`` parameter is expected to be of type ConnectParams and + contains connection parameters that will be used when establishing the + connection. See the documentation on ConnectParams for more information. If + this parameter is not specified, the additional keyword parameters will be + used to create an instance of ConnectParams. If both the ``params`` + parameter and additional keyword parameters are specified, the values in + the keyword parameters have precedence. Note that if a ``dsn`` is also + supplied, then in python-oracledb Thin mode, the values of the parameters + specified (if any) within the dsn will override the values passed as + additional keyword parameters, which themselves override the values set in + the ``params`` parameter object. + + The following parameters are all optional. A brief description of each + parameter follows: + + - ``user``: the name of the database user to connect to + (default: None) + + - ``proxy_user``: the name of the proxy user to connect to. If this value + is not specified, it will be parsed out of user if user is in the form + "user[proxy_user]" + (default: None) + + - ``password``: the password for the database user + (default: None) + + - ``newpassword``: a new password for the database user. The new password + will take effect immediately upon a successful connection to the database + (default: None) + + - ``wallet_password``: the password to use to decrypt the wallet, if it is + encrypted. This is not the database password. For Oracle Autonomous + Database this is the password created when downloading the wallet. This + value is only used in python-oracledb Thin mode. + (default: None) + + - ``access_token``: a string, or a 2-tuple, or a callable. If it is a + string, it specifies an Entra ID OAuth2 token used for Open Authorization + (OAuth 2.0) token based authentication. 
If it is a 2-tuple, it specifies
+ the token and private key strings used for Oracle Cloud Infrastructure
+ (OCI) Identity and Access Management (IAM) token based authentication. If
+ it is a callable, it returns either a string or a 2-tuple used for OAuth
+ 2.0 or OCI IAM token based authentication and is useful when the pool
+ needs to expand and create new connections but the current authentication
+ token has expired
+ (default: None)
+
+ - ``host``: the hostname or IP address of the machine hosting the database
+ or the database listener
+ (default: None)
+
+ - ``port``: the port number on which the database listener is listening
+ (default: 1521)
+
+ - ``protocol``: one of the strings "tcp" or "tcps" indicating whether to
+ use unencrypted network traffic or encrypted network traffic (TLS)
+ (default: "tcp")
+
+ - ``https_proxy``: the hostname or IP address of a proxy host to use for
+ tunneling secure connections
+ (default: None)
+
+ - ``https_proxy_port``: the port on which to communicate with the proxy
+ host
+ (default: 0)
+
+ - ``service_name``: the service name of the database
+ (default: None)
+
+ - ``instance_name``: the instance name of the database
+ (default: None)
+
+ - ``sid``: the system identifier (SID) of the database. Note using a
+ service_name instead is recommended
+ (default: None)
+
+ - ``server_type``: the type of server connection that should be
+ established. If specified, it should be one of the strings "dedicated",
+ "shared" or "pooled"
+ (default: None)
+
+ - ``cclass``: the connection class to use for Database Resident Connection
+ Pooling (DRCP)
+ (default: None)
+
+ - ``purity``: the connection purity to use for Database Resident Connection
+ Pooling (DRCP)
+ (default: :attr:`oracledb.PURITY_DEFAULT`)
+
+ - ``expire_time``: the number of minutes between the sending of keepalive
+ probes. If this parameter is set to a value greater than zero it enables
+ keepalive
+ (default: 0)
+
+ - ``retry_count``: the number of times that initial connection
+ establishment should be retried before the connection attempt is
+ terminated
+ (default: 0)
+
+ - ``retry_delay``: the number of seconds to wait before retrying to
+ establish a connection
+ (default: 1)
+
+ - ``tcp_connect_timeout``: a float indicating the maximum number of seconds
+ to wait when establishing a connection to the database host
+ (default: 20.0)
+
+ - ``ssl_server_dn_match``: a boolean indicating whether the server
+ certificate distinguished name (DN) should be matched in addition to the
+ regular certificate verification that is performed. Note that if the
+ ssl_server_cert_dn parameter is not provided, host name matching is
+ performed instead
+ (default: True)
+
+ - ``ssl_server_cert_dn``: the distinguished name (DN) which should be
+ matched with the server. This value is ignored if the ssl_server_dn_match
+ parameter is not set to the value True. If specified, this value is used
+ for any verification. Otherwise, the hostname will be used
+ (default: None)
+
+ - ``wallet_location``: the directory where the wallet can be found. In
+ python-oracledb Thin mode this must be the directory containing the PEM-
+ encoded wallet file ewallet.pem. In python-oracledb Thick mode this must
+ be the directory containing the file cwallet.sso
+ (default: None)
+
+ - ``events``: a boolean specifying whether events mode should be enabled.
+ This value is only used in python-oracledb Thick mode and is needed for + continuous query notification and high availability event notifications + (default: False) + + - ``externalauth``: a boolean indicating whether to use external + authentication + (default: False) + + - ``mode``: the authorization mode to use. One of the constants + :data:`oracledb.AUTH_MODE_DEFAULT`, :data:`oracledb.AUTH_MODE_PRELIM`, + :data:`oracledb.AUTH_MODE_SYSASM`, :data:`oracledb.AUTH_MODE_SYSBKP`, + :data:`oracledb.AUTH_MODE_SYSDBA`, :data:`oracledb.AUTH_MODE_SYSDGD`, + :data:`oracledb.AUTH_MODE_SYSKMT`, :data:`oracledb.AUTH_MODE_SYSOPER`, or + :data:`oracledb.AUTH_MODE_SYSRAC` + (default: :attr:`oracledb.AUTH_MODE_DEFAULT`) + + - ``disable_oob``: a boolean indicating whether out-of-band breaks should + be disabled. This value is only used in python-oracledb Thin mode. It has + no effect on Windows which does not support this functionality + (default: False) + + - ``stmtcachesize``: the size of the statement cache + (default: :attr:`oracledb.defaults.stmtcachesize + `) + + - ``edition``: edition to use for the connection. This parameter cannot be + used simultaneously with the cclass parameter + (default: None) + + - ``tag``: identifies the type of connection that should be returned from a + pool. This value is only used in python-oracledb Thick mode + (default: None) + + - ``matchanytag``: a boolean specifying whether any tag can be used when + acquiring a connection from the pool. This value is only used in python- + oracledb Thick mode + (default: False) + + - ``config_dir``: a directory in which the optional tnsnames.ora + configuration file is located. This value is only used in python-oracledb + Thin mode. For python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()` + (default: :attr:`oracledb.defaults.config_dir + `) + + - ``appcontext``: application context used by the connection. It should be + a list of 3-tuples (namespace, name, value) and each entry in the tuple + should be a string + (default: None) + + - ``shardingkey``: a list of strings, numbers, bytes or dates that identify + the database shard to connect to. This value is only used in python- + oracledb Thick mode + (default: None) + + - ``supershardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + (default: None) + + - ``debug_jdwp``: a string with the format "host=;port=" that + specifies the host and port of the PL/SQL debugger. This value is only + used in python-oracledb Thin mode. For python-oracledb Thick mode set + the ORA_DEBUG_JDWP environment variable + (default: None) + + - ``connection_id_prefix``: an application specific prefix that is added to + the connection identifier used for tracing + (default: None) + + - ``ssl_context``: an SSLContext object used for connecting to the database + using TLS. This SSL context will be modified to include the private key + or any certificates found in a separately supplied wallet. This parameter + should only be specified if the default SSLContext object cannot be used + (default: None) + + - ``sdu``: the requested size of the Session Data Unit (SDU), in bytes. The + value tunes internal buffers used for communication to the database. 
+ Bigger values can increase throughput for large queries or bulk data + loads, but at the cost of higher memory use. The SDU size that will + actually be used is negotiated down to the lower of this value and the + database network SDU configuration value + (default: 8192) + + - ``pool_boundary``: one of the values "statement" or "transaction" + indicating when pooled DRCP connections can be returned to the pool. This + requires the use of DRCP with Oracle Database 23.4 or higher + (default: None) + + - ``use_tcp_fast_open``: a boolean indicating whether to use TCP fast open. + This is an Oracle Autonomous Database Serverless (ADB-S) specific + property for clients connecting from within OCI Cloud network. Please + refer to the ADB-S documentation for more information + (default: False) + + - ``ssl_version``: one of the values ssl.TLSVersion.TLSv1_2 or + ssl.TLSVersion.TLSv1_3 indicating which TLS version to use + (default: None) + + - ``program``: a string recorded by Oracle Database as the program from + which the connection originates + (default: :attr:`oracledb.defaults.program + `) + + - ``machine``: a string recorded by Oracle Database as the name of the + machine from which the connection originates + (default: :attr:`oracledb.defaults.machine + `) + + - ``terminal``: a string recorded by Oracle Database as the terminal + identifier from which the connection originates + (default: :attr:`oracledb.defaults.terminal + `) + + - ``osuser``: a string recorded by Oracle Database as the operating system + user who originated the connection + (default: :attr:`oracledb.defaults.osuser + `) + + - ``driver_name``: a string recorded by Oracle Database as the name of the + driver which originated the connection + (default: :attr:`oracledb.defaults.driver_name + `) + + - ``use_sni``: a boolean indicating whether to use the TLS SNI extension to + bypass the second TLS neogiation that would otherwise be required + (default: False) + + - ``thick_mode_dsn_passthrough``: a boolean indicating whether to pass the + connect string to the Oracle Client libraries unchanged without parsing + by the driver. Setting this to False makes python-oracledb Thick and Thin + mode applications behave similarly regarding connection string parameter + handling and locating any optional tnsnames.ora configuration file + (default: :attr:`oracledb.defaults.thick_mode_dsn_passthrough + `) + + - ``extra_auth_params``: a dictionary containing configuration parameters + necessary for Oracle Database authentication using plugins, such as the + Azure and OCI cloud-native authentication plugins + (default: None) + + - ``pool_name``: the name of the DRCP pool when using multi-pool DRCP with + Oracle Database 23.4, or higher + (default: None) + + - ``handle``: an integer representing a pointer to a valid service context + handle. This value is only used in python-oracledb Thick mode. It should + be used with extreme caution + (default: 0) + """ + pass diff --git a/.venv/lib/python3.9/site-packages/oracledb/constants.py b/.venv/lib/python3.9/site-packages/oracledb/constants.py new file mode 100644 index 0000000..e7b556d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/constants.py @@ -0,0 +1,127 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 2025, Oracle and/or its affiliates. 
+# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# constants.py +# +# Contains the constants defined by the package. +# ----------------------------------------------------------------------------- + +# AQ delivery modes +MSG_BUFFERED = 2 +MSG_PERSISTENT = 1 +MSG_PERSISTENT_OR_BUFFERED = 3 + +# AQ dequeue modes +DEQ_BROWSE = 1 +DEQ_LOCKED = 2 +DEQ_REMOVE = 3 +DEQ_REMOVE_NODATA = 4 + +# AQ dequeue navigation modes +DEQ_FIRST_MSG = 1 +DEQ_NEXT_MSG = 3 +DEQ_NEXT_TRANSACTION = 2 + +# AQ dequeue visibility modes +DEQ_IMMEDIATE = 1 +DEQ_ON_COMMIT = 2 + +# AQ dequeue wait modes +DEQ_NO_WAIT = 0 +DEQ_WAIT_FOREVER = 2**32 - 1 + +# AQ enqueue visibility modes +ENQ_IMMEDIATE = 1 +ENQ_ON_COMMIT = 2 + +# AQ message states +MSG_EXPIRED = 3 +MSG_PROCESSED = 2 +MSG_READY = 0 +MSG_WAITING = 1 + +# AQ other constants +MSG_NO_DELAY = 0 +MSG_NO_EXPIRATION = -1 + +# shutdown modes +DBSHUTDOWN_ABORT = 4 +DBSHUTDOWN_FINAL = 5 +DBSHUTDOWN_IMMEDIATE = 3 +DBSHUTDOWN_TRANSACTIONAL = 1 +DBSHUTDOWN_TRANSACTIONAL_LOCAL = 2 + +# subscription grouping classes +SUBSCR_GROUPING_CLASS_NONE = 0 +SUBSCR_GROUPING_CLASS_TIME = 1 + +# subscription grouping types +SUBSCR_GROUPING_TYPE_SUMMARY = 1 +SUBSCR_GROUPING_TYPE_LAST = 2 + +# subscription namespaces +SUBSCR_NAMESPACE_AQ = 1 +SUBSCR_NAMESPACE_DBCHANGE = 2 + +# subscription protocols +SUBSCR_PROTO_HTTP = 3 +SUBSCR_PROTO_MAIL = 1 +SUBSCR_PROTO_CALLBACK = 0 +SUBSCR_PROTO_SERVER = 2 + +# subscription quality of service +SUBSCR_QOS_BEST_EFFORT = 0x10 +SUBSCR_QOS_DEFAULT = 0 +SUBSCR_QOS_DEREG_NFY = 0x02 +SUBSCR_QOS_QUERY = 0x08 +SUBSCR_QOS_RELIABLE = 0x01 +SUBSCR_QOS_ROWIDS = 0x04 + +# event types +EVENT_AQ = 100 +EVENT_DEREG = 5 +EVENT_NONE = 0 +EVENT_OBJCHANGE = 6 +EVENT_QUERYCHANGE = 7 +EVENT_SHUTDOWN = 2 +EVENT_SHUTDOWN_ANY = 3 +EVENT_STARTUP = 1 + +# operation codes +OPCODE_ALLOPS = 0 +OPCODE_ALLROWS = 0x01 +OPCODE_ALTER = 0x10 +OPCODE_DELETE = 0x08 +OPCODE_DROP = 0x20 +OPCODE_INSERT = 0x02 +OPCODE_UPDATE = 0x04 + +# flags for tpc_end() +TPC_END_NORMAL = 0 +TPC_END_SUSPEND = 0x00100000 + +# vector metadata flags +VECTOR_META_FLAG_FLEXIBLE_DIM = 0x01 +VECTOR_META_FLAG_SPARSE_VECTOR = 0x02 diff --git a/.venv/lib/python3.9/site-packages/oracledb/constructors.py b/.venv/lib/python3.9/site-packages/oracledb/constructors.py new file mode 100644 index 0000000..8a58245 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/constructors.py @@ -0,0 +1,100 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 
2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# constructors.py +# +# Contains the constructors mandated by the Python Database API. +# ----------------------------------------------------------------------------- + +import datetime +from typing import Any + +from . import errors + + +def Binary(value: Any) -> bytes: + """ + Constructs an object holding a binary (long) string value. + """ + return bytes(value) + + +def Date(year: int, month: int, day: int) -> datetime.date: + """ + Constructs an object holding a date value. + """ + return datetime.date(year, month, day) + + +def DateFromTicks(ticks: float) -> datetime.date: + """ + Constructor mandated by the database API for creating a date value given + the number of seconds since the epoch (January 1, 1970). This is equivalent + to using datetime.date.fromtimestamp() and that should be used instead. + """ + return datetime.date.fromtimestamp(ticks) + + +def Time(hour: int, minute: int, second: int) -> None: + """ + Constructor mandated by the database API for creating a time value. Since + Oracle doesn't support time only values, an exception is raised when this + method is called. + """ + errors._raise_err(errors.ERR_TIME_NOT_SUPPORTED) + + +def TimeFromTicks(ticks: float) -> None: + """ + Constructor mandated by the database API for creating a time value given + the number of seconds since the epoch (January 1, 1970). Since Oracle + doesn't support time only values, an exception is raised when this method + is called. + """ + errors._raise_err(errors.ERR_TIME_NOT_SUPPORTED) + + +def Timestamp( + year: int, + month: int, + day: int, + hour: int = 0, + minute: int = 0, + second: int = 0, +) -> datetime.datetime: + """ + Constructs an object holding a time stamp value. + """ + return datetime.datetime(year, month, day, hour, minute, second) + + +def TimestampFromTicks(ticks: float) -> datetime.datetime: + """ + Constructor mandated by the database API for creating a timestamp value + given the number of seconds since the epoch (January 1, 1970). This is + equivalent to using datetime.datetime.fromtimestamp() and that should be + used instead. 
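+
+ For example (illustrative only; the exact result depends on the local
+ timezone)::
+
+     # equivalent to datetime.datetime.fromtimestamp(1700000000)
+     ts = oracledb.TimestampFromTicks(1700000000)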
+ """ + return datetime.datetime.fromtimestamp(ticks) diff --git a/.venv/lib/python3.9/site-packages/oracledb/cursor.py b/.venv/lib/python3.9/site-packages/oracledb/cursor.py new file mode 100644 index 0000000..7d1a2ed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/cursor.py @@ -0,0 +1,1373 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# cursor.py +# +# Contains the Cursor class used for executing statements on connections and +# fetching results from queries. +# ----------------------------------------------------------------------------- + +from typing import Any, Union, Callable, Optional + +from . import connection as connection_module +from . import errors +from . import utils +from .base import BaseMetaClass +from .base_impl import DbType, DB_TYPE_OBJECT +from .dbobject import DbObjectType +from .fetch_info import FetchInfo +from .var import Var + + +class BaseCursor(metaclass=BaseMetaClass): + _impl = None + + def __init__( + self, + connection: "connection_module.Connection", + scrollable: bool = False, + ) -> None: + self._connection = connection + self._impl = connection._impl.create_cursor_impl(scrollable) + + def __del__(self): + if self._impl is not None: + self._impl.close(in_del=True) + + def __enter__(self): + """ + The entry point for the cursor as a context manager. It returns itself. + """ + self._verify_open() + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + """ + The exit point for the cursor as a context manager. It closes the + cursor. + """ + self._verify_open() + self._impl.close(in_del=True) + self._impl = None + + def __repr__(self): + cls_name = self.__class__._public_name + return f"<{cls_name} on {self.connection!r}>" + + def _call( + self, + name: str, + parameters: Union[list, tuple], + keyword_parameters: dict, + return_value: Any = None, + ) -> None: + """ + Internal method used for generating the PL/SQL block used to call + stored procedures. 
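+
+ For example, a call such as ``_call("myproc", [10, 20], {"flag": 1})``
+ builds a block of the form ``begin myproc(:1,:2,flag => :3); end;`` and
+ binds the values ``[10, 20, 1]`` (see ``_call_get_execute_args()``
+ below).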
+ """ + utils.verify_stored_proc_args(parameters, keyword_parameters) + self._verify_open() + statement, bind_values = self._call_get_execute_args( + name, parameters, keyword_parameters, return_value + ) + return self.execute(statement, bind_values) + + def _call_get_execute_args( + self, + name: str, + parameters: Union[list, tuple], + keyword_parameters: dict, + return_value: str = None, + ) -> None: + """ + Internal method used for generating the PL/SQL block used to call + stored procedures and functions. A tuple containing this statement and + the bind values is returned. + """ + bind_names = [] + bind_values = [] + statement_parts = ["begin "] + if return_value is not None: + statement_parts.append(":retval := ") + bind_values.append(return_value) + statement_parts.append(name + "(") + if parameters: + bind_values.extend(parameters) + bind_names = [":%d" % (i + 1) for i in range(len(parameters))] + if keyword_parameters: + for arg_name, arg_value in keyword_parameters.items(): + bind_values.append(arg_value) + bind_names.append(f"{arg_name} => :{len(bind_names) + 1}") + statement_parts.append(",".join(bind_names)) + statement_parts.append("); end;") + statement = "".join(statement_parts) + return (statement, bind_values) + + def _normalize_statement(self, statement: Optional[str]) -> Optional[str]: + """ + Normalizes a statement by stripping leading and trailing spaces. If the + result is an empty string, an error is raised immediately. + """ + if statement is not None: + statement = statement.strip() + if not statement: + errors._raise_err(errors.ERR_EMPTY_STATEMENT) + return statement + + def _prepare( + self, statement: str, tag: str = None, cache_statement: bool = True + ) -> None: + """ + Internal method used for preparing a statement for execution. + """ + self._impl.prepare(statement, tag, cache_statement) + + def _prepare_for_execute( + self, statement, parameters, keyword_parameters=None + ): + """ + Internal method for preparing a statement for execution. + """ + self._verify_open() + self._impl._prepare_for_execute( + self, + self._normalize_statement(statement), + parameters, + keyword_parameters, + ) + + def _verify_fetch(self) -> None: + """ + Verifies that fetching is possible from this cursor. + """ + self._verify_open() + if not self._impl.is_query(self): + errors._raise_err(errors.ERR_NOT_A_QUERY) + + def _verify_open(self) -> None: + """ + Verifies that the cursor is open and the associated connection is + connected. If either condition is false an exception is raised. + """ + if self._impl is None: + errors._raise_err(errors.ERR_CURSOR_NOT_OPEN) + self.connection._verify_connected() + + @property + def arraysize(self) -> int: + """ + This read-write attribute can be used to tune the number of rows + internally fetched and buffered by internal calls to the database when + fetching rows from SELECT statements and REF CURSORS. + + The value of ``arraysize`` can drastically affect the performance of a + query since it directly affects the number of network round trips + between Python and the database. For methods like :meth:`fetchone()` + and :meth:`fetchall()` it affects internal behavior but does not change + how many rows are returned to the application. For :meth:`fetchmany()` + it is the default number of rows to fetch. + + The attribute is only used for tuning row and SODA document fetches + from the database. It does not affect data inserts. 
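+
+ For example, a larger value might be set before fetching a big result
+ set (an illustrative sketch, shown with the synchronous API)::
+
+     cursor.arraysize = 1000
+     cursor.execute("select * from large_table")
+     rows = cursor.fetchall()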
+ + Due to the performance benefits, the default ``arraysize`` is *100* + instead of the *1* that the Python DB API recommends. + """ + self._verify_open() + return self._impl.arraysize + + @arraysize.setter + def arraysize(self, value: int) -> None: + self._verify_open() + if not isinstance(value, int) or value <= 0: + errors._raise_err(errors.ERR_INVALID_ARRAYSIZE) + self._impl.arraysize = value + + def arrayvar( + self, + typ: Union[DbType, DbObjectType, type], + value: Union[list, int], + size: int = 0, + ) -> Var: + """ + Creates an array variable associated with the cursor of the given type + and size and returns a :ref:`variable object `. The value is + either an integer specifying the number of elements to allocate or it + is a list and the number of elements allocated is drawn from the size + of the list. If the value is a list, the variable is also set with the + contents of the list. If the size is not specified and the type is a + string or binary, 4000 bytes is allocated. This is needed for passing + arrays to PL/SQL (in cases where the list might be empty and the type + cannot be determined automatically) or returning arrays from PL/SQL. + """ + self._verify_open() + if isinstance(value, list): + num_elements = len(value) + elif isinstance(value, int): + num_elements = value + else: + raise TypeError("expecting integer or list of values") + var = self._impl.create_var( + self.connection, + typ, + size=size, + num_elements=num_elements, + is_array=True, + ) + if isinstance(value, list): + var.setvalue(0, value) + return var + + def bindnames(self) -> list: + """ + Returns the list of bind variable names bound to the statement. Note + that a statement must have been prepared first. + """ + self._verify_open() + if self._impl.statement is None: + errors._raise_err(errors.ERR_NO_STATEMENT_PREPARED) + return self._impl.get_bind_names() + + @property + def bindvars(self) -> list: + """ + This read-only attribute provides the bind variables used for the last + statement that was executed on the cursor. The value will be either a + list or a dictionary, depending on whether binding was done by position + or name. Care should be taken when referencing this attribute. In + particular, elements should not be removed or replaced. + """ + self._verify_open() + return self._impl.get_bind_vars() + + def close(self) -> None: + """ + Closes the cursor now, rather than whenever ``__del__`` is called. The + cursor will be unusable from this point forward; an Error exception + will be raised if any operation is attempted with the cursor. + """ + self._verify_open() + self._impl.close() + self._impl = None + + @property + def description(self) -> Union[list[FetchInfo], None]: + """ + This read-only attribute contains information about the columns used in + a query. It is a list of FetchInfo objects, one per column. This + attribute will be *None* for statements that are not SELECT or WITH + statements, or if the cursor has not had :meth:`execute()` invoked yet. + """ + self._verify_open() + if self._impl.is_query(self): + return [FetchInfo._from_impl(i) for i in self._impl.fetch_metadata] + + @property + def fetchvars(self) -> list: + """ + This read-only attribute specifies the list of variables created for + the last SELECT query that was executed on the cursor. Care should be + taken when referencing this attribute. In particular, elements should + not be removed or replaced. 
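+
+ For example, the variables can be inspected after a query has been
+ executed (an illustrative sketch, shown with the synchronous API)::
+
+     cursor.execute("select employee_id, last_name from employees")
+     cursor.fetchone()
+     print(cursor.fetchvars)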
+ """ + self._verify_open() + return self._impl.get_fetch_vars() + + def getarraydmlrowcounts(self) -> list: + """ + Retrieves the DML row counts after a call to :meth:`executemany()` with + ``arraydmlrowcounts`` enabled. This will return a list of integers + corresponding to the number of rows affected by the DML statement for + each element of the array passed to :meth:`executemany()`. + + This method is only available for Oracle Database 12.1 and later. + """ + self._verify_open() + return self._impl.get_array_dml_row_counts() + + def getbatcherrors(self) -> list: + """ + Retrieves the exceptions that took place after a call to + :meth:`executemany()` with ``batcherrors`` enabled. This will + return a list of Error objects, one error for each iteration that + failed. The offset can be determined by looking at the offset attribute + of the error object. + """ + self._verify_open() + return self._impl.get_batch_errors() + + def getimplicitresults(self) -> list: + """ + Returns a list of cursors which correspond to implicit results made + available from a PL/SQL block or procedure without the use of OUT ref + cursor parameters. The PL/SQL block or procedure opens the cursors and + marks them for return to the client using the procedure + dbms_sql.return_result. In python-oracledb Thick mode, closing the + parent cursor will result in the automatic closure of the implicit + result set cursors. See :ref:`implicitresults`. + + This method is only available for Oracle Database 12.1 (or later). For + python-oracledb :ref:`Thick ` mode, Oracle Client 12.1 + (or later) is additionally required. + """ + self._verify_open() + return self._impl.get_implicit_results(self.connection) + + @property + def inputtypehandler(self) -> Callable: + """ + This read-write attribute specifies a method called for each value that + is bound to a statement executed on the cursor and overrides the + attribute with the same name on the connection if specified. The method + signature is handler(cursor, value, arraysize) and the return value is + expected to be a variable object or *None* in which case a default + variable object will be created. If this attribute is *None*, the + default behavior will take place for all values bound to the + statements. + """ + self._verify_open() + return self._impl.inputtypehandler + + @inputtypehandler.setter + def inputtypehandler(self, value: Callable) -> None: + self._verify_open() + self._impl.inputtypehandler = value + + @property + def lastrowid(self) -> str: + """ + This read-only attribute returns the rowid of the last row modified by + the cursor. If no row was modified by the last operation performed on + the cursor, the value *None* is returned. + """ + self._verify_open() + return self._impl.get_lastrowid() + + @property + def outputtypehandler(self) -> Callable: + """ + This read-write attribute specifies a method called for each column + that is to be fetched from this cursor. The method signature is + handler(cursor, metadata) and the return value is expected to be a + :ref:`variable object ` or *None* in which case a default + variable object will be created. If this attribute is *None*, then the + default behavior will take place for all columns fetched from this + cursor. 
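+
+        As an illustrative sketch (fetching ``NUMBER`` columns as strings is
+        only an example of what a handler might choose to do)::
+
+            def output_type_handler(cursor, metadata):
+                if metadata.type_code is oracledb.DB_TYPE_NUMBER:
+                    return cursor.var(str, arraysize=cursor.arraysize)
+
+            cursor.outputtypehandler = output_type_handler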
+ """ + self._verify_open() + return self._impl.outputtypehandler + + @outputtypehandler.setter + def outputtypehandler(self, value: Callable) -> None: + self._verify_open() + self._impl.outputtypehandler = value + + @property + def prefetchrows(self) -> int: + """ + This read-write attribute can be used to tune the number of rows that + python-oracledb initially fetches from Oracle Database when a SELECT + query is executed. The value can improve performance by reducing the + number of round-trips to the database. The attribute does not affect + data insertion. + + In python-oracledb Thin mode, prefetching can reuse the + :attr:`arraysize` buffer. However in Thick mode, extra memory is + required. + + Setting this value to *0* can be useful when the timing of fetches must + be explicitly controlled. + + Queries that return :ref:`LOB ` objects and similar types do + not support prefetching. The ``prefetchrows`` attribute is ignored in + queries that involve these types. + """ + self._verify_open() + return self._impl.prefetchrows + + @prefetchrows.setter + def prefetchrows(self, value: int) -> None: + self._verify_open() + self._impl.prefetchrows = value + + def prepare( + self, statement: str, tag: str = None, cache_statement: bool = True + ) -> None: + """ + This can be used before a call to :meth:`execute()` or + :meth:`executemany()` to define the statement that will be + executed. When this is done, the prepare phase will not be performed + when the call to :meth:`execute()` or :meth:`executemany()` is made + with *None* or the same string object as the statement. + + If the ``tag`` parameter is specified and the ``cache_statement`` + parameter is *True*, the statement will be returned to the statement + cache with the given tag. + + If the ``cache_statement`` parameter is *False*, the statement will be + removed from the statement cache (if it was found there) or will simply + not be cached. + """ + self._verify_open() + self._prepare(statement, tag, cache_statement) + + @property + def rowcount(self) -> int: + """ + This read-only attribute specifies the number of rows that have + currently been fetched from the cursor (for select statements) or that + have been affected by the operation (for insert, update, delete, and + merge statements). For all other statements the value is always *0*. If + the cursor or connection is closed, the value returned is *-1*. + """ + if self._impl is not None and self.connection._impl is not None: + return self._impl.rowcount + return -1 + + @property + def rowfactory(self) -> Callable: + """ + This read-write attribute specifies a method to call for each row that + is retrieved from the database. Ordinarily, a tuple is returned for + each row but if this attribute is set, the method is called with the + tuple that would normally be returned, and the result of the method is + returned instead. + + The ``rowfactory`` attribute should be set after each statement + execution before data is fetched from the cursor. + """ + self._verify_open() + return self._impl.rowfactory + + @rowfactory.setter + def rowfactory(self, value: Callable) -> None: + self._verify_open() + self._impl.rowfactory = value + + @property + def scrollable(self) -> bool: + """ + This read-write boolean attribute specifies whether the cursor can be + scrolled or not. By default, cursors are not scrollable, as the server + resources and response times are greater than nonscrollable cursors. 
+ This attribute is checked and the corresponding mode set in Oracle when + calling the method :meth:`execute()`. + """ + self._verify_open() + return self._impl.scrollable + + @scrollable.setter + def scrollable(self, value: bool) -> None: + self._verify_open() + self._impl.scrollable = value + + def setinputsizes(self, *args: Any, **kwargs: Any) -> Union[list, dict]: + """ + This can be used before calls to :meth:`execute()` or + :meth:`executemany()` to predefine memory areas used for + :ref:`bind variables `. Each parameter should be a type object + corresponding to the data that will be used for a bind variable + placeholder in the SQL or PL/SQL statement. Alternatively, it can be an + integer specifying the maximum length of a string bind variable value. + + Use keyword parameters when :ref:`binding by name `. Use + positional parameters when :ref:`binding by position `. + The parameter value can be *None* to indicate that python-oracledb + should determine the required space from the data value provided. + + The parameters or keyword names correspond to the bind variable + placeholders used in the SQL or PL/SQL statement. Note this means that + for use with :meth:`executemany()` it does not correspond to the number + of bind value mappings or sequences being passed. + + When repeated calls to :meth:`execute()` or :meth:`executemany()` are + made binding different string data lengths, using + :meth:`setinputsizes()` can help reduce the database's SQL "version + count" for the statement. See + :ref:`Reducing the SQL Version Count `. + """ + if args and kwargs: + errors._raise_err(errors.ERR_ARGS_AND_KEYWORD_ARGS) + elif args or kwargs: + self._verify_open() + return self._impl.setinputsizes(self.connection, args, kwargs) + return [] + + def setoutputsize(self, size: int, column: int = 0) -> None: + """ + This method does nothing and is retained solely for compatibility with + the DB API. Python-oracledb automatically allocates as much space as + needed to fetch LONG and LONG RAW columns, and also to fetch CLOB as + string and BLOB as bytes. + """ + pass + + @property + def statement(self) -> Union[str, None]: + """ + This read-only attribute provides the string object that was previously + prepared with :meth:`prepare()` or executed with :meth:`execute()`. + """ + if self._impl is not None: + return self._impl.statement + + def var( + self, + typ: Union[DbType, DbObjectType, type], + size: int = 0, + arraysize: int = 1, + inconverter: Callable = None, + outconverter: Callable = None, + typename: str = None, + encoding_errors: str = None, + bypass_decode: bool = False, + convert_nulls: bool = False, + *, + encodingErrors: str = None, + ) -> "Var": + """ + Creates a :ref:`variable object ` with the specified + characteristics. This method can be used for binding to PL/SQL IN and + OUT parameters where the length or type cannot be determined + automatically from the Python variable being bound. It can also be used + in :ref:`input ` and :ref:`output + ` type handlers. + + The ``typ`` parameter specifies the type of data that should be stored + in the variable. 
This should be one of the :ref:`database type + constants `, :ref:`DB API constants `, an object type + returned from the method :meth:`Connection.gettype()` or one of the + following Python types: + + - bool (uses :attr:`oracledb.DB_TYPE_BOOLEAN`) + - bytes (uses :attr:`oracledb.DB_TYPE_RAW`) + - datetime.date (uses :attr:`oracledb.DB_TYPE_DATE`) + - datetime.datetime (uses :attr:`oracledb.DB_TYPE_DATE`) + - datetime.timedelta (uses :attr:`oracledb.DB_TYPE_INTERVAL_DS`) + - decimal.Decimal (uses :attr:`oracledb.DB_TYPE_NUMBER`) + - float (uses :attr:`oracledb.DB_TYPE_NUMBER`) + - int (uses :attr:`oracledb.DB_TYPE_NUMBER`) + - str (uses :attr:`oracledb.DB_TYPE_VARCHAR`) + + The ``size`` parameter specifies the length of string and raw variables + and is ignored in all other cases. If not specified for string and raw + variables, the value *4000* is used. + + The ``arraysize`` parameter specifies the number of elements the + variable will have. If not specified the bind array size (usually *1*) + is used. When a variable is created in an output type handler this + parameter should be set to the cursor's array size. + + The ``inconverter`` and ``outconverter`` parameters specify methods + used for converting values to/from the database. More information can + be found in the section on :ref:`variable objects`. + + The ``typename`` parameter specifies the name of a SQL object type and + must be specified when using type :data:`oracledb.OBJECT` unless the + type object was passed directly as the first parameter. + + The ``encoding_errors`` parameter specifies what should happen when + decoding byte strings fetched from the database into strings. It should + be one of the values noted in the builtin `decode + `__ + function. + + The ``bypass_decode`` parameter, if specified, should be passed as a + boolean value. Passing a *True* value causes values of database types + :data:`~oracledb.DB_TYPE_VARCHAR`, :data:`~oracledb.DB_TYPE_CHAR`, + :data:`~oracledb.DB_TYPE_NVARCHAR`, :data:`~oracledb.DB_TYPE_NCHAR` and + :data:`~oracledb.DB_TYPE_LONG` to be returned as bytes instead of str, + meaning that python-oracledb does not do any decoding. See + :ref:`Fetching raw data ` for more information. + + The ``convert_nulls`` parameter, if specified, should be passed as a + boolean value. Passing the value *True* causes the ``outconverter`` to + be called when a null value is fetched from the database; otherwise, + the ``outconverter`` is only called when non-null values are fetched + from the database. + + For consistency and compliance with the PEP 8 naming style, the + parameter ``encodingErrors`` was renamed to ``encoding_errors``. The + old name will continue to work as a keyword parameter for a period of + time. 
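+
+        As an illustrative sketch (the procedure name ``pkg_demo.get_name``
+        and its parameters are hypothetical), a string OUT bind for a PL/SQL
+        call might be created with::
+
+            out_var = cursor.var(str)
+            cursor.callproc("pkg_demo.get_name", [1, out_var])
+            name = out_var.getvalue()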
+ """ + self._verify_open() + if typename is not None: + typ = self.connection.gettype(typename) + elif typ is DB_TYPE_OBJECT: + errors._raise_err(errors.ERR_MISSING_TYPE_NAME_FOR_OBJECT_VAR) + if encodingErrors is not None: + if encoding_errors is not None: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="encodingErrors", + new_name="encoding_errors", + ) + encoding_errors = encodingErrors + return self._impl.create_var( + self.connection, + typ, + size, + arraysize, + inconverter, + outconverter, + encoding_errors, + bypass_decode, + convert_nulls=convert_nulls, + ) + + @property + def warning(self) -> Union[errors._Error, None]: + """ + This read-only attribute provides an + :ref:`oracledb._Error` object giving information about any + database warnings (such as PL/SQL compilation warnings) that were + generated during the last call to :meth:`execute()` or + :meth:`executemany()`. This value is automatically cleared on + the next call to :meth:`execute()` or :meth:`executemany()`. If no + warning was generated the value *None* is returned. + """ + self._verify_open() + return self._impl.warning + + +class Cursor(BaseCursor): + + def __iter__(self): + """ + Returns the cursor itself to be used as an iterator. + """ + return self + + def __next__(self): + self._verify_fetch() + row = self._impl.fetch_next_row(self) + if row is not None: + return row + raise StopIteration + + def _get_oci_attr(self, attr_num: int, attr_type: int) -> Any: + """ + Returns the value of the specified OCI attribute from the internal + handle. This is only supported in python-oracledb's thick mode and + should only be used as directed by Oracle. + """ + self._verify_open() + return self._impl._get_oci_attr(attr_num, attr_type) + + def _set_oci_attr(self, attr_num: int, attr_type: int, value: Any) -> None: + """ + Sets the value of the specified OCI attribute on the internal handle. + This is only supported in python-oracledb's thick mode and should only + be used as directed by Oracle. + """ + self._verify_open() + self._impl._set_oci_attr(attr_num, attr_type, value) + + def callfunc( + self, + name: str, + return_type: Any, + parameters: Optional[Union[list, tuple]] = None, + keyword_parameters: Optional[dict] = None, + *, + keywordParameters: Optional[dict] = None, + ) -> Any: + """ + Calls a PL/SQL function with the given name and returns its value. + + The ``return_type`` parameter is expected to be a Python type, one of + the :ref:`oracledb types ` or an + :ref:`Object Type `. + + The sequence of parameters must contain one entry for each parameter + that the PL/SQL function expects. Any keyword parameters will be + included after the positional parameters. + + Use :meth:`var()` to define any OUT or IN OUT parameters, if necessary. + + For consistency and compliance with the PEP 8 naming style, the + parameter ``keywordParameters`` was renamed to ``keyword_parameters``. + The old name will continue to work for a period of time. 
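+
+        As an illustrative sketch (the function name ``pkg_demo.get_count``
+        and its argument are hypothetical)::
+
+            count = cursor.callfunc("pkg_demo.get_count", int, ["SALES"])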
+ """ + var = self.var(return_type) + if keywordParameters is not None: + if keyword_parameters is not None: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="keywordParameters", + new_name="keyword_parameters", + ) + keyword_parameters = keywordParameters + self._call(name, parameters, keyword_parameters, var) + return var.getvalue() + + def callproc( + self, + name: str, + parameters: Optional[Union[list, tuple]] = None, + keyword_parameters: Optional[dict] = None, + *, + keywordParameters: Optional[dict] = None, + ) -> list: + """ + Calls a PL/SQL procedure with the given name. + + The sequence of parameters must contain one entry for each parameter + that the procedure expects. The result of the call is a modified copy + of the input sequence. Input parameters are left untouched; output and + input/output parameters are replaced with possibly new values. Keyword + parameters will be included after the positional parameters and are not + returned as part of the output sequence. + + Use :meth:`var()` to define any OUT or IN OUT parameters if necessary. + + No query result set is returned by this method. Instead, use + :ref:`REF CURSOR ` parameters or + :ref:`Implicit Results `. + + For consistency and compliance with the PEP 8 naming style, the + parameter ``keywordParameters`` was renamed to ``keyword_parameters``. + The old name will continue to work for a period of time. + """ + if keywordParameters is not None: + if keyword_parameters is not None: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="keywordParameters", + new_name="keyword_parameters", + ) + keyword_parameters = keywordParameters + self._call(name, parameters, keyword_parameters) + if parameters is None: + return [] + return [ + v.get_value(0) for v in self._impl.bind_vars[: len(parameters)] + ] + + @property + def connection(self) -> "connection_module.Connection": + """ + This read-only attribute returns a reference to the connection object + on which the cursor was created. + """ + return self._connection + + def execute( + self, + statement: Optional[str], + parameters: Optional[Union[list, tuple, dict]] = None, + *, + suspend_on_success: bool = False, + fetch_lobs: Optional[bool] = None, + fetch_decimals: Optional[bool] = None, + **keyword_parameters: Any, + ) -> Any: + """ + Executes a statement against the database. See :ref:`sqlexecution`. + + Parameters may be passed as a dictionary or sequence or as keyword + parameters. If the parameters are a dictionary, the values will be + bound by name and if the parameters are a sequence the values will be + bound by position. Note that if the values are bound by position, the + order of the variables is from left to right as they are encountered in + the statement and SQL statements are processed differently than PL/SQL + statements. For this reason, it is generally recommended to bind + parameters by name instead of by position. + + Parameters passed as a dictionary are name and value pairs. The name + maps to the bind variable name used by the statement and the value maps + to the Python value you wish bound to that bind variable. + + A reference to the statement will be retained by the cursor. If *None* + or the same string object is passed in again, the cursor will execute + that statement again without performing a prepare or rebinding and + redefining. This is most effective for algorithms where the same + statement is used, but different parameters are bound to it (many + times). 
Note that parameters that are not passed in during subsequent + executions will retain the value passed in during the last execution + that contained them. + + The ``suspend_on_success`` parameter is specific to :ref:`sessionless + transactions `. When set to *True*, the active + sessionless transaction will be suspended when ``execute()`` completes + successfully. See :ref:`suspendtxns`. + + The ``fetch_lobs`` parameter specifies whether to return LOB locators + or ``str``/``bytes`` values when fetching LOB columns. The default + value is :data:`oracledb.defaults.fetch_lobs `. + + The ``fetch_decimals`` parameter specifies whether to return + ``decimal.Decimal`` values when fetching columns of type ``NUMBER``. + The default value is :data:`oracledb.defaults.fetch_decimals + `. + + For maximum efficiency when reusing a statement, it is best to use the + :meth:`Cursor.setinputsizes()` method to specify the parameter types + and sizes ahead of time; in particular, *None* is assumed to be a + string of length 1 so any values that are later bound as numbers or + dates will raise a TypeError exception. + + If the statement is a SELECT query, the cursor is returned as a + convenience to the caller (so it can be used directly as an iterator + over the rows in the cursor); otherwise, *None* is returned. + """ + self._prepare_for_execute(statement, parameters, keyword_parameters) + impl = self._impl + if fetch_lobs is not None: + impl.fetch_lobs = fetch_lobs + if fetch_decimals is not None: + impl.fetch_decimals = fetch_decimals + impl.suspend_on_success = suspend_on_success + impl.execute(self) + if impl.fetch_vars is not None: + return self + + def executemany( + self, + statement: Optional[str], + parameters: Any, + *, + batcherrors: bool = False, + arraydmlrowcounts: bool = False, + suspend_on_success: bool = False, + batch_size: int = 2**32 - 1, + ) -> None: + """ + Executes a SQL statement once using all bind value mappings or + sequences found in the sequence parameters. This can be used to insert, + update, or delete multiple rows in a table with a single + python-oracledb call. It can also invoke a PL/SQL procedure multiple + times. See :ref:`batchstmnt`. + + The ``statement`` parameter is managed in the same way as the + :meth:`execute()` method manages it. + + The ``parameters`` parameter can be a list of tuples, where each tuple + item maps to one bind variable placeholder in ``statement``. It can + also be a list of dictionaries, where the keys match the bind variable + placeholder names in ``statement``. If there are no bind values, or + values have previously been bound, the ``parameters`` value can be an + integer specifying the number of iterations. The ``parameters`` + parameter can also be a :ref:`DataFrame `, or a + third-party data frame that supports the `Apache Arrow PyCapsule + `__ Interface. + + In python-oracledb Thick mode, if the size of the buffers allocated for + any of the parameters exceeds 2 GB, you will receive the error + ``DPI-1015: array size of is too large``. If you receive this + error, decrease the number of rows being inserted. + + When *True*, the ``batcherrors`` parameter enables batch error support + within Oracle Database and ensures that the call succeeds even if an + exception takes place in one or more of the sequence of bind values. + The errors can then be retrieved using :meth:`getbatcherrors()`. + + When *True*, the ``arraydmlrowcounts`` parameter enables DML row counts + to be retrieved from Oracle after the method has completed. 
The row + counts can then be retrieved using + :meth:`getarraydmlrowcounts()`. + + Both the ``batcherrors`` parameter and the ``arraydmlrowcounts`` + parameter can only be *True* when executing an insert, update, delete, + or merge statement; in all other cases an error will be raised. + + The ``suspend_on_success`` parameter is specific to :ref:`sessionless + transactions `. When set to *True*, the active + sessionless transaction will be suspended when ``executemany()`` + completes successfully. See :ref:`suspendtxns`. + + The ``batch_size`` parameter is used to split large data sets into + smaller pieces for sending to the database. It is the number of records + in each batch. This parameter can be used to tune performance. When + ``Connection.autocommit`` is *True*, a commit will take place for each + batch. + + For maximum efficiency, it is best to use the :meth:`setinputsizes()` + method to specify the bind value types and sizes. In particular, if the + type is not explicitly specified, the value *None* is assumed to be a + string of length 1 so any values that are later bound as numbers or + dates will raise a TypeError exception. + """ + self._verify_open() + manager = self._impl._prepare_for_executemany( + self, + self._normalize_statement(statement), + parameters, + batch_size, + ) + self._impl.suspend_on_success = suspend_on_success + while manager.num_rows > 0: + self._impl.executemany( + self, + manager.num_rows, + batcherrors, + arraydmlrowcounts, + manager.message_offset, + ) + manager.next_batch() + + def fetchall(self) -> list: + """ + Fetches all (remaining) rows of a SELECT query result, returning them + as a list of tuples. An empty list is returned if no more rows are + available. An exception is raised if the previous call to + :meth:`execute()` did not produce any result set or no call was issued + yet. + + Note that the cursor's :attr:`~Cursor.arraysize` attribute can affect + the performance of this operation, as internally data is fetched in + batches of that size from the database. See :ref:`Tuning Fetch + Performance `. + + An exception is raised if the previous call to :meth:`execute()` did + not produce any result set or no call was issued yet. + """ + self._verify_fetch() + result = [] + fetch_next_row = self._impl.fetch_next_row + while True: + row = fetch_next_row(self) + if row is None: + break + result.append(row) + return result + + def fetchmany( + self, size: Optional[int] = None, numRows: Optional[int] = None + ) -> list: + """ + Fetches the next set of rows of a SELECT query result, returning a list + of tuples. An empty list is returned if no more rows are available. + Note that the cursor's :attr:`arraysize` attribute can affect the + performance of this operation. + + The number of rows to fetch is specified by the ``size`` parameter. If + it is not given, the cursor's :attr:`arraysize` attribute determines + the number of rows to be fetched. If the number of rows available to be + fetched is fewer than the amount requested, fewer rows will be + returned. + + An exception is raised if the previous call to :meth:`execute()` did + not produce any result set or no call was issued yet. 
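+
+        For example (an illustrative sketch; the query and the hypothetical
+        ``process_batch()`` helper are placeholders)::
+
+            cursor.execute("select employee_id, last_name from employees")
+            while True:
+                rows = cursor.fetchmany(100)
+                if not rows:
+                    break
+                process_batch(rows)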
+ """ + self._verify_fetch() + if size is None: + if numRows is not None: + size = numRows + else: + size = self._impl.arraysize + elif numRows is not None: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="numRows", + new_name="size", + ) + result = [] + fetch_next_row = self._impl.fetch_next_row + while len(result) < size: + row = fetch_next_row(self) + if row is None: + break + result.append(row) + return result + + def fetchone(self) -> Any: + """ + Fetches the next row of a SELECT query result set, returning a single + tuple or *None* when no more data is available. An exception is raised + if the previous call to :meth:`execute()` did not produce any result + set or no call was issued yet. + + When ``fetchone()`` is used to iterate over a result set, the cursor’s + :attr:`arraysize` attribute can affect performance, as internally data + is fetched in batches of that size from Oracle Database. + """ + self._verify_fetch() + return self._impl.fetch_next_row(self) + + def parse(self, statement: str) -> None: + """ + This can be used to parse a statement without actually executing it + (parsing step is done automatically by Oracle when a statement is + :meth:`executed `). + """ + self._verify_open() + self._prepare(statement) + self._impl.parse(self) + + def scroll(self, value: int = 0, mode: str = "relative") -> None: + """ + Scrolls the cursor in the result set to a new position according to the + mode. + + If mode is *relative* (the default value), the value is taken as an + offset to the current position in the result set. If set to *absolute*, + value states an absolute target position. If set to *first*, the cursor + is positioned at the first row and if set to *last*, the cursor is set + to the last row in the result set. + + An error is raised if the mode is *relative* or *absolute* and the + scroll operation would position the cursor outside of the result set. + """ + self._verify_open() + if not self._impl.scrollable: + errors._raise_err(errors.ERR_SCROLL_NOT_SUPPORTED) + self._impl.scroll(self, value, mode) + + +class AsyncCursor(BaseCursor): + + async def __aenter__(self): + """ + The entry point for the cursor as a context manager. It returns itself. + """ + self._verify_open() + return self + + async def __aexit__(self, *exc_info): + """ + The exit point for the cursor as a context manager. It closes the + cursor. + """ + self._verify_open() + self._impl.close(in_del=True) + self._impl = None + + def __aiter__(self): + """ + Returns the cursor itself to be used as an asynchronous iterator. + """ + return self + + async def __anext__(self): + self._verify_fetch() + row = await self._impl.fetch_next_row(self) + if row is not None: + return row + raise StopAsyncIteration + + async def callfunc( + self, + name: str, + return_type: Any, + parameters: Optional[Union[list, tuple]] = None, + keyword_parameters: Optional[dict] = None, + ) -> Any: + """ + Calls a PL/SQL function with the given name and returns its value. + + The ``return_type`` parameter is expected to be a Python type, one of + the :ref:`oracledb types ` or an :ref:`Object Type + `. + + The sequence of parameters must contain one entry for each parameter + that the PL/SQL function expects. Any keyword parameters will be + included after the positional parameters. + + Use :meth:`var()` to define any OUT or IN OUT parameters, if necessary. 
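+
+        As an illustrative sketch (the function name ``pkg_demo.get_count``
+        and its keyword argument are hypothetical)::
+
+            count = await cursor.callfunc(
+                "pkg_demo.get_count", int, keyword_parameters={"dept": "SALES"}
+            )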
+ """ + var = self.var(return_type) + await self._call(name, parameters, keyword_parameters, var) + return var.getvalue() + + async def callproc( + self, + name: str, + parameters: Optional[Union[list, tuple]] = None, + keyword_parameters: Optional[dict] = None, + ) -> list: + """ + Calls a PL/SQL procedure with the given name. + + The sequence of parameters must contain one entry for each parameter + that the procedure expects. The result of the call is a modified copy + of the input sequence. Input parameters are left untouched; output and + input/output parameters are replaced with possibly new values. Keyword + parameters will be included after the positional parameters and are not + returned as part of the output sequence. + + Use :meth:`var()` to define any OUT or IN OUT parameters if necessary. + + No query result set is returned by :meth:`callproc()`. Instead, use + :ref:`REF CURSOR ` parameters or :ref:`Implicit Results + `. + """ + await self._call(name, parameters, keyword_parameters) + if parameters is None: + return [] + return [ + v.get_value(0) for v in self._impl.bind_vars[: len(parameters)] + ] + + @property + def connection(self) -> "connection_module.AsyncConnection": + """ + This read-only attribute returns a reference to the connection object + on which the cursor was created. + """ + return self._connection + + async def execute( + self, + statement: Optional[str], + parameters: Optional[Union[list, tuple, dict]] = None, + *, + suspend_on_success: bool = False, + fetch_lobs: Optional[bool] = None, + fetch_decimals: Optional[bool] = None, + **keyword_parameters: Any, + ) -> None: + """ + Executes a statement against the database. See :ref:`sqlexecution`. + + Parameters may be passed as a dictionary or sequence or as keyword + parameters. If the parameters are a dictionary, the values will be + bound by name and if the parameters are a sequence the values will be + bound by position. Note that if the values are bound by position, the + order of the variables is from left to right as they are encountered in + the statement and SQL statements are processed differently than PL/SQL + statements. For this reason, it is generally recommended to bind + parameters by name instead of by position. + + Parameters passed as a dictionary are name and value pairs. The name + maps to the bind variable name used by the statement and the value maps + to the Python value you wish bound to that bind variable. + + A reference to the statement will be retained by the cursor. If *None* + or the same string object is passed in again, the cursor will execute + that statement again without performing a prepare or rebinding and + redefining. This is most effective for algorithms where the same + statement is used, but different parameters are bound to it (many + times). Note that parameters that are not passed in during subsequent + executions will retain the value passed in during the last execution + that contained them. + + The ``suspend_on_success`` parameter is specific to :ref:`sessionless + transactions `. When set to *True*, the active + sessionless transaction will be suspended when ``execute()`` completes + successfully. See :ref:`suspendtxns`. + + The ``fetch_lobs`` parameter specifies whether to return LOB locators + or ``str``/``bytes`` values when fetching LOB columns. The default + value is :data:`oracledb.defaults.fetch_lobs `. + + The ``fetch_decimals`` parameter specifies whether to return + ``decimal.Decimal`` values when fetching columns of type ``NUMBER``. 
+ The default value is :data:`oracledb.defaults.fetch_decimals + `. + + For maximum efficiency when reusing a statement, it is best to use the + :meth:`setinputsizes()` method to specify the parameter types and sizes + ahead of time; in particular, *None* is assumed to be a string of + length 1 so any values that are later bound as numbers or dates will + raise a TypeError exception. + + If the statement is a SELECT query, the cursor is returned as a + convenience to the caller (so it can be used directly as an iterator + over the rows in the cursor); otherwise, *None* is returned. + """ + self._prepare_for_execute(statement, parameters, keyword_parameters) + impl = self._impl + impl.suspend_on_success = suspend_on_success + if fetch_lobs is not None: + impl.fetch_lobs = fetch_lobs + if fetch_decimals is not None: + impl.fetch_decimals = fetch_decimals + await self._impl.execute(self) + + async def executemany( + self, + statement: Optional[str], + parameters: Any, + *, + batcherrors: bool = False, + arraydmlrowcounts: bool = False, + suspend_on_success: bool = False, + batch_size: int = 2**32 - 1, + ) -> None: + """ + Executes a SQL statement once using all bind value mappings or + sequences found in the sequence parameters. This can be used to insert, + update, or delete multiple rows in a table with a single + python-oracledb call. It can also invoke a PL/SQL procedure multiple + times. See :ref:`batchstmnt`. + + The ``statement`` parameter is managed in the same way as the + :meth:`execute()` method manages it. + + The ``parameters`` parameter can be a list of tuples, where each tuple + item maps to one bind variable placeholder in ``statement``. It can + also be a list of dictionaries, where the keys match the bind variable + placeholder names in ``statement``. If there are no bind values, or + values have previously been bound, the ``parameters`` value can be an + integer specifying the number of iterations. The ``parameters`` + parameter can also be a :ref:`DataFrame `, or a + third-party data frame that supports the `Apache Arrow PyCapsule + `__ Interface. + + In python-oracledb Thick mode, if the size of the buffers allocated for + any of the parameters exceeds 2 GB, you will receive the error + ``DPI-1015: array size of is too large``. If you receive this + error, decrease the number of rows being inserted. + + When True, the ``batcherrors`` parameter enables batch error support + within Oracle and ensures that the call succeeds even if an exception + takes place in one or more of the sequence of parameters. The errors + can then be retrieved using :meth:`getbatcherrors()`. + + When True, the ``arraydmlrowcounts`` parameter enables DML row counts + to be retrieved from Oracle after the method has completed. The row + counts can then be retrieved using :meth:`getarraydmlrowcounts()`. + + Both the ``batcherrors`` parameter and the ``arraydmlrowcounts`` + parameter can only be True when executing an insert, update, delete, or + merge statement. In all other cases, an error will be raised. + + The ``suspend_on_success`` parameter is specific to :ref:`sessionless + transactions `. When set to *True*, the active + sessionless transaction will be suspended when ``executemany()`` + completes successfully. See :ref:`suspendtxns`. + + The ``batch_size`` parameter is used to split large data sets into + smaller pieces for sending to the database. It is the number of records + in each batch. This parameter can be used to tune performance. 
When + ``Connection.autocommit`` is *True*, a commit will take place for each + batch. Do not set ``batch_size`` when ``suspend_on_success`` is *True*. + + For maximum efficiency, it is best to use the :meth:`setinputsizes()` + method to specify the parameter types and sizes ahead of time. In + particular, the value *None* is assumed to be a string of length 1 so + any values that are later bound as numbers or dates will raise a + TypeError exception. + """ + self._verify_open() + manager = self._impl._prepare_for_executemany( + self, self._normalize_statement(statement), parameters, batch_size + ) + self._impl.suspend_on_success = suspend_on_success + while manager.num_rows > 0: + await self._impl.executemany( + self, + manager.num_rows, + batcherrors, + arraydmlrowcounts, + manager.message_offset, + ) + manager.next_batch() + + async def fetchall(self) -> list: + """ + Fetches all (remaining) rows of a SELECT query result, returning them + as a list of tuples. An empty list is returned if no more rows are + available. An exception is raised if the previous call to + :meth:`execute()` did not produce any result set or no call was issued + yet. + + Note that the cursor's :attr:`~AsyncCursor.arraysize` attribute can + affect the performance of this operation, as internally data is fetched + in batches of that size from the database. + """ + self._verify_fetch() + result = [] + fetch_next_row = self._impl.fetch_next_row + while True: + row = await fetch_next_row(self) + if row is None: + break + result.append(row) + return result + + async def fetchmany(self, size: Optional[int] = None) -> list: + """ + Fetches the next set of rows of a SELECT query result, returning a list + of tuples. An empty list is returned if no more rows are available. + Note that the cursor's :attr:`arraysize` attribute can affect the + performance of this operation. + + The number of rows to fetch is specified by the parameter. If it is not + given, the cursor's :attr:`arraysize` attribute determines the number + of rows to be fetched. If the number of rows available to be fetched is + fewer than the amount requested, fewer rows will be returned. + + An exception is raised if the previous call to :meth:`execute()` did + not produce any result set or no call was issued yet. + """ + self._verify_fetch() + if size is None: + size = self._impl.arraysize + result = [] + fetch_next_row = self._impl.fetch_next_row + while len(result) < size: + row = await fetch_next_row(self) + if row is None: + break + result.append(row) + return result + + async def fetchone(self) -> Any: + """ + Fetches the next row of a SELECT query result set, returning a single + tuple or *None* when no more data is available. An exception is raised + if the previous call to :meth:`execute()` did not produce any result + set or no call was issued yet. + + When ``fetchone()`` is used to iterate over a result set, the cursor’s + :attr:`arraysize` attribute can affect performance, as internally data + is fetched in batches of that size from Oracle Database. + """ + self._verify_fetch() + return await self._impl.fetch_next_row(self) + + async def parse(self, statement: str) -> None: + """ + This can be used to parse a statement without actually executing it + (parsing step is done automatically by Oracle when a statement is + :meth:`executed `). 
+ """ + self._verify_open() + self._prepare(statement) + await self._impl.parse(self) + + async def scroll(self, value: int = 0, mode: str = "relative") -> None: + """ + Scrolls the cursor in the result set to a new position according to the + mode. + + If mode is *relative* (the default value), the value is taken as an + offset to the current position in the result set. If set to *absolute*, + value states an absolute target position. If set to *first*, the cursor + is positioned at the first row and if set to *last*, the cursor is set + to the last row in the result set. + + An error is raised if the mode is *relative* or *absolute* and the + scroll operation would position the cursor outside of the result set. + """ + self._verify_open() + if not self._impl.scrollable: + errors._raise_err(errors.ERR_SCROLL_NOT_SUPPORTED) + await self._impl.scroll(self, value, mode) diff --git a/.venv/lib/python3.9/site-packages/oracledb/dataframe.py b/.venv/lib/python3.9/site-packages/oracledb/dataframe.py new file mode 100644 index 0000000..feb74c4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/dataframe.py @@ -0,0 +1,122 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# dataframe.py +# +# Implement a data frame that can be used for efficiently transferring Arrow +# array data to other data frame libraries. +# ----------------------------------------------------------------------------- + +from .arrow_array import ArrowArray +from .arrow_impl import DataFrameImpl +from .base import BaseMetaClass +from . import errors + + +class DataFrame(metaclass=BaseMetaClass): + _impl = None + + def __init__(self): + errors._raise_err(errors.ERR_INTERNAL_CREATION_REQUIRED) + + @classmethod + def _from_arrow(cls, obj): + df = cls.__new__(cls) + df._initialize(DataFrameImpl.from_arrow_stream(obj)) + return df + + @classmethod + def _from_impl(cls, impl): + df = cls.__new__(cls) + df._initialize(impl) + return df + + def _initialize(self, impl): + """ + Initializes the object given the implementation. 
+ """ + self._impl = impl + self._arrays = [ArrowArray._from_impl(a) for a in impl.get_arrays()] + self._arrays_by_name = {} + for array in self._arrays: + self._arrays_by_name[array.name] = array + + def __arrow_c_stream__(self, requested_schema=None): + """ + Returns the ArrowArrayStream PyCapsule which allows direct conversion + to foreign data frames that support this interface. + """ + if requested_schema is not None: + raise NotImplementedError("requested_schema") + return self._impl.get_stream_capsule() + + def column_arrays(self) -> list[ArrowArray]: + """ + Returns a list of ArrowArray objects, each containing a select list + column. + """ + return self._arrays + + def column_names(self) -> list[str]: + """ + Returns a list of the column names in the data frame. + """ + return [a.name for a in self._arrays] + + def get_column(self, i: int) -> ArrowArray: + """ + Returns an :ref:`ArrowArray ` object for the + column at the given index ``i``. If the index is out of range, an + IndexError exception is raised. + """ + if i < 0 or i >= self.num_columns(): + raise IndexError( + f"Column index {i} is out of bounds for " + f"DataFrame with {self.num_columns()} columns" + ) + return self._arrays[i] + + def get_column_by_name(self, name: str) -> ArrowArray: + """ + Returns an :ref:`ArrowArray ` object for the + column with the given name ``name``. If the column name is not found, + a KeyError exception is raised. + """ + try: + return self._arrays_by_name[name] + except KeyError: + raise KeyError(f"Column {name} not found in DataFrame") + + def num_columns(self) -> int: + """ + Returns the number of columns in the data frame. + """ + return len(self._arrays) + + def num_rows(self) -> int: + """ + Returns the number of rows in the data frame. + """ + return len(self._arrays[0]) diff --git a/.venv/lib/python3.9/site-packages/oracledb/dbobject.py b/.venv/lib/python3.9/site-packages/oracledb/dbobject.py new file mode 100644 index 0000000..fef230e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/dbobject.py @@ -0,0 +1,392 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# dbobject.py +# +# Contains the classes used for managing database objects and the database +# object type metadata: DbObject, DbObjectType and DbObjectAttr. 
+# ----------------------------------------------------------------------------- + +from typing import Any, Sequence, Union + +from . import errors +from .base import BaseMetaClass +from .base_impl import DbType + + +class DbObject(metaclass=BaseMetaClass): + + def __getattr__(self, name): + try: + attr_impl = self._impl.type.attrs_by_name[name] + except KeyError: + return super().__getattribute__(name) + return self._impl.get_attr_value(attr_impl) + + def __iter__(self): + self._ensure_is_collection() + ix = self._impl.get_first_index() + while ix is not None: + yield self._impl.get_element_by_index(ix) + ix = self._impl.get_next_index(ix) + + def __repr__(self): + cls_name = self.__class__._public_name + return f"<{cls_name} {self.type._get_full_name()} at {hex(id(self))}>" + + def __setattr__(self, name, value): + if name == "_impl" or name == "_type": + super().__setattr__(name, value) + else: + attr_impl = self._impl.type.attrs_by_name[name] + self._impl.set_attr_value(attr_impl, value) + + def _ensure_is_collection(self): + """ + Ensures that the object refers to a collection. If not, an exception is + raised. + """ + if not self.type.iscollection: + errors._raise_err( + errors.ERR_OBJECT_IS_NOT_A_COLLECTION, + name=self.type._get_full_name(), + ) + + @classmethod + def _from_impl(cls, impl): + obj = cls.__new__(cls) + obj._impl = impl + obj._type = None + return obj + + def append(self, element: Any) -> None: + """ + Appends an element to the collection object. If no elements exist in + the collection, this creates an element at index 0; otherwise, it + creates an element immediately following the highest index available in + the collection. + """ + self._ensure_is_collection() + self._impl.append(element) + + def asdict(self) -> dict: + """ + Returns a dictionary where the collection’s indexes are the keys and + the elements are its values. + """ + self._ensure_is_collection() + result = {} + ix = self._impl.get_first_index() + while ix is not None: + result[ix] = self._impl.get_element_by_index(ix) + ix = self._impl.get_next_index(ix) + return result + + def aslist(self) -> list: + """ + Returns a list of each of the collection’s elements in index order. + """ + return list(self) + + def copy(self) -> "DbObject": + """ + Creates a copy of the object and returns it. + """ + copied_impl = self._impl.copy() + return DbObject._from_impl(copied_impl) + + def delete(self, index: int) -> None: + """ + Delete the element at the specified index of the collection. If the + element does not exist or is otherwise invalid, an error is raised. + Note that the indices of the remaining elements in the collection are + not changed. In other words, the delete operation creates holes in the + collection. + """ + self._ensure_is_collection() + self._impl.delete_by_index(index) + + def exists(self, index: int) -> bool: + """ + Return True or False indicating if an element exists in the collection + at the specified index. + """ + self._ensure_is_collection() + return self._impl.exists_by_index(index) + + def extend(self, seq: list) -> None: + """ + Appends all of the elements in the sequence to the collection. This is + the equivalent of performing append() for each element found in the + sequence. + """ + self._ensure_is_collection() + for value in seq: + self.append(value) + + def first(self) -> int: + """ + Returns the index of the first element in the collection. If the + collection is empty, None is returned. 
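+
+        As an illustrative sketch (``collection`` stands for any collection
+        DbObject), a sparse collection can be traversed in index order using
+        :meth:`first()` and :meth:`next()`::
+
+            ix = collection.first()
+            while ix is not None:
+                print(ix, collection.getelement(ix))
+                ix = collection.next(ix)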
+        """
+        self._ensure_is_collection()
+        return self._impl.get_first_index()
+
+    def getelement(self, index: int) -> Any:
+        """
+        Returns the element at the specified index of the collection. If no
+        element exists at that index, an exception is raised.
+        """
+        self._ensure_is_collection()
+        return self._impl.get_element_by_index(index)
+
+    def last(self) -> int:
+        """
+        Returns the index of the last element in the collection. If the
+        collection is empty, None is returned.
+        """
+        self._ensure_is_collection()
+        return self._impl.get_last_index()
+
+    def next(self, index: int) -> int:
+        """
+        Returns the index of the next element in the collection following the
+        specified index. If there are no elements in the collection following
+        the specified index, None is returned.
+        """
+        self._ensure_is_collection()
+        return self._impl.get_next_index(index)
+
+    def prev(self, index: int) -> int:
+        """
+        Returns the index of the element in the collection preceding the
+        specified index. If there are no elements in the collection preceding
+        the specified index, None is returned.
+        """
+        self._ensure_is_collection()
+        return self._impl.get_prev_index(index)
+
+    def setelement(self, index: int, value: Any) -> None:
+        """
+        Sets the value in the collection at the specified index to the given
+        value.
+        """
+        self._ensure_is_collection()
+        self._impl.set_element_by_index(index, value)
+
+    def size(self) -> int:
+        """
+        Returns the number of elements in the collection.
+        """
+        self._ensure_is_collection()
+        return self._impl.get_size()
+
+    def trim(self, num: int) -> None:
+        """
+        Removes the specified number of elements from the end of the
+        collection.
+        """
+        self._ensure_is_collection()
+        self._impl.trim(num)
+
+    @property
+    def type(self) -> "DbObjectType":
+        """
+        This read-only attribute returns an ObjectType corresponding to the
+        type of the object.
+        """
+        if self._type is None:
+            self._type = DbObjectType._from_impl(self._impl.type)
+        return self._type
+
+
+class DbObjectAttr(metaclass=BaseMetaClass):
+
+    def __repr__(self):
+        return f""
+
+    @classmethod
+    def _from_impl(cls, impl):
+        attr = cls.__new__(cls)
+        attr._impl = impl
+        attr._type = None
+        return attr
+
+    @property
+    def max_size(self) -> Union[int, None]:
+        """
+        This read-only attribute returns the maximum size (in bytes) of the
+        attribute when the attribute's type is one of
+        DB_TYPE_RAW, DB_TYPE_CHAR, DB_TYPE_NCHAR, DB_TYPE_VARCHAR and
+        DB_TYPE_NVARCHAR. For all other types, the value returned is None.
+        """
+        if self._impl.max_size:
+            return self._impl.max_size
+
+    @property
+    def name(self) -> str:
+        """
+        This read-only attribute returns the name of the attribute.
+        """
+        return self._impl.name
+
+    @property
+    def precision(self) -> Union[int, None]:
+        """
+        This read-only attribute returns the precision of the attribute when
+        the attribute's type is DB_TYPE_NUMBER. For all other types, the value
+        returned is None.
+        """
+        if self._impl.precision or self._impl.scale:
+            return self._impl.precision
+
+    @property
+    def scale(self) -> Union[int, None]:
+        """
+        This read-only attribute returns the scale of the attribute when the
+        attribute's type is DB_TYPE_NUMBER. For all other types, the value
+        returned is None.
+        """
+        if self._impl.precision or self._impl.scale:
+            return self._impl.scale
+
+    @property
+    def type(self) -> Union["DbObjectType", DbType]:
+        """
+        This read-only attribute returns the type of the attribute. 
This will + be an Oracle Object Type if the variable binds Oracle objects; + otherwise, it will be one of the database type constants. + """ + if self._type is None: + if self._impl.objtype is not None: + self._type = DbObjectType._from_impl(self._impl.objtype) + else: + self._type = self._impl.dbtype + return self._type + + +class DbObjectType(metaclass=BaseMetaClass): + + def __call__(self, value: Sequence = None) -> DbObject: + """ + The object type may be called directly and serves as an alternative way + of calling :meth:`~DbObjectType.newobject()`. + """ + return self.newobject(value) + + def __eq__(self, other): + if isinstance(other, DbObjectType): + return other._impl == self._impl + return NotImplemented + + def __repr__(self): + return f"" + + @classmethod + def _from_impl(cls, impl): + typ = cls.__new__(cls) + typ._impl = impl + typ._attributes = None + typ._element_type = None + return typ + + def _get_full_name(self): + """ + Returns the full name of the type. + """ + return self._impl._get_fqn() + + @property + def attributes(self) -> list["DbObjectAttr"]: + """ + This read-only attribute returns a list of the attributes that make up + the object type. + """ + if self._attributes is None: + self._attributes = [ + DbObjectAttr._from_impl(i) for i in self._impl.attrs + ] + return self._attributes + + @property + def iscollection(self) -> bool: + """ + This read-only attribute returns a boolean indicating if the object + type refers to a collection or not. + """ + return self._impl.is_collection + + @property + def name(self) -> str: + """ + This read-only attribute returns the name of the type. + """ + return self._impl.name + + @property + def element_type(self) -> Union["DbObjectType", DbType]: + """ + This read-only attribute returns the type of elements found in + collections of this type, if iscollection is True; otherwise, it + returns None. If the collection contains objects, this will be another + object type; otherwise, it will be one of the database type constants. + """ + if self._element_type is None: + if self._impl.element_metadata.objtype is not None: + typ_impl = self._impl.element_metadata.objtype + self._element_type = DbObjectType._from_impl(typ_impl) + else: + self._element_type = self._impl.element_metadata.dbtype + return self._element_type + + def newobject(self, value: Sequence = None) -> DbObject: + """ + Returns a new Oracle object of the given type. This object can then be + modified by setting its attributes and then bound to a cursor for + interaction with Oracle. If the object type refers to a collection, a + sequence may be passed and the collection will be initialized with the + items in that sequence. + """ + obj_impl = self._impl.create_new_object() + obj = DbObject._from_impl(obj_impl) + if value is not None: + obj.extend(value) + return obj + + @property + def package_name(self) -> str: + """ + This read-only attribute returns the name of the package containing the + PL/SQL type or None if the type is not a PL/SQL type. + """ + return self._impl.package_name + + @property + def schema(self) -> str: + """ + This read-only attribute returns the name of the schema that owns the + type. 
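+
+        For example, assuming ``typ`` is a DbObjectType returned by
+        :meth:`Connection.gettype()`, the owning schema and type name can be
+        combined as::
+
+            full_name = f"{typ.schema}.{typ.name}"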
+ """ + return self._impl.schema diff --git a/.venv/lib/python3.9/site-packages/oracledb/defaults.py b/.venv/lib/python3.9/site-packages/oracledb/defaults.py new file mode 100644 index 0000000..c717c84 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/defaults.py @@ -0,0 +1,329 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# defaults.py +# +# Contains the Defaults class used for managing default values used throughout +# the module. +# ----------------------------------------------------------------------------- + +from . import base_impl +from . import errors +from .base import BaseMetaClass + + +class Defaults(metaclass=BaseMetaClass): + """ + A singleton Defaults object contains attributes to adjust default + behaviors of python-oracledb. It is accessed using the :data:`defaults` + attribute of the imported module. + """ + + def __init__(self) -> None: + self._impl = base_impl.DEFAULTS + + @property + def arraysize(self) -> int: + """ + This read-write attribute specifies the default arraysize to use when + cursors are created. + + It is an attribute for tuning the performance of fetching rows from + Oracle Database. It does not affect data insertion. + + This value is the default for :attr:`Cursor.arraysize` and + :attr:`AsyncCursor.arraysize`. + + This attribute has an initial value of *100*. + """ + return self._impl.arraysize + + @arraysize.setter + def arraysize(self, value: int): + self._impl.arraysize = value + + @property + def config_dir(self) -> str: + """ + This read-write attribute specifies the directory in which the optional + configuration file ``tnsnames.ora`` will be read in python-oracledb + Thin mode. It is also used in Thick mode if + :attr:`Defaults.thick_mode_dsn_passthrough` is *False*. + + At time of ``import oracledb`` the value of + ``oracledb.defaults.config_dir`` will be set to (first one wins): + + - the value of ``$TNS_ADMIN``, if ``TNS_ADMIN`` is set. + + - ``$ORACLE_HOME/network/admin``, if ``$ORACLE_HOME`` is set. + + Otherwise, ``oracledb.defaults.config_dir`` will not be set. + + At completion of a call to :meth:`oracledb.init_oracle_client()` in + python-oracledb Thick mode, the value of ``config_dir`` may get + changed. 
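+
+        For example (an illustrative sketch; the directory, net service name,
+        and credentials are placeholders)::
+
+            import oracledb
+
+            oracledb.defaults.config_dir = "/opt/oracle/config"
+            connection = oracledb.connect(
+                user="hr", password=userpwd, dsn="myalias"
+            )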
+ """ + return self._impl.config_dir + + @config_dir.setter + def config_dir(self, value: str): + self._impl.config_dir = value + + @property + def fetch_lobs(self) -> bool: + """ + This read-write attribute specifies whether queries that contain LOBs + should return LOB objects or their contents instead. + + When the value of this attribute is *True*, then queries to LOB columns + return LOB locators. When the value of this attribute is *False*, then + CLOBs and NCLOBs are fetched as strings, and BLOBs are fetched as + bytes. If LOBs are larger than 1 GB, then this attribute should be set + to *True* and the LOBs should be streamed. + + The value of ``oracledb.defaults.fetch_lobs`` does not affect LOBs + returned as OUT binds. + + The value of ``fetch_lobs`` can be overridden at statement execution by + passing an equivalent parameter. + + An output type handler such as the one previously required in the + obsolete cx_Oracle driver can alternatively be used to adjust the + returned type. If a type handler exists and returns a variable (that + is, `cursor.var(...)`), then that return variable is used. If the type + handler returns *None*, then the value of + ``oracledb.defaults.fetch_lobs`` is used. + + This attribute has an initial value of *True*. + """ + return self._impl.fetch_lobs + + @fetch_lobs.setter + def fetch_lobs(self, value: bool): + self._impl.fetch_lobs = value + + @property + def fetch_decimals(self) -> bool: + """ + This read-write attribute specifies whether queries that contain + numbers should be fetched as Python decimal.Decimal objects or floating + point numbers. This can help avoid issues with converting numbers from + Oracle Database's decimal format to Python's binary format. + + The value of ``fetch_decimals`` can be overridden at statement + execution by passing an equivalent parameter. + + An output type handler such as previously required in the obsolete + cx_Oracle driver can alternatively be used to adjust the returned type. + If a type handler exists and returns a variable (that is, + ``cursor.var(...)``), then that return variable is used. If the type + handler returns *None*, then the value of + ``oracledb.defaults.fetch_decimals`` is used to determine whether to + return ``decimal.Decimal`` values. + + This attribute has an initial value of *False*. + """ + return self._impl.fetch_decimals + + @fetch_decimals.setter + def fetch_decimals(self, value: bool): + self._impl.fetch_decimals = value + + @property + def prefetchrows(self) -> int: + """ + This read-write attribute specifies the default number of rows to + prefetch when cursors are executed. + + This is an attribute for tuning the performance of fetching rows from + Oracle Database. It does not affect data insertion. + + This value is the default for :attr:`Cursor.prefetchrows` and + :attr:`AsyncCursor.prefetchrows`. + + This attribute is ignored when using :meth:`Connection.fetch_df_all()` + or :meth:`Connection.fetch_df_batches()` since these methods always set + the internal prefetch size to their relevant ``arraysize`` or ``size`` + parameter value. + + This attribute has an initial value of *2*. + """ + return self._impl.prefetchrows + + @prefetchrows.setter + def prefetchrows(self, value: int): + self._impl.prefetchrows = value + + @property + def stmtcachesize(self) -> int: + """ + This read-write attribute specifies the default size of the statement + cache. + + This is an attribute for tuning statement execution performance when a + statement is executed more than once. 
+ + This value is the default for :attr:`Connection.stmtcachesize`, + :attr:`ConnectionPool.stmtcachesize`, + :attr:`AsyncConnection.stmtcachesize`, and + :attr:`AsyncConnectionPool.stmtcachesize`. + + This attribute has an initial value of *20*. + """ + return self._impl.stmtcachesize + + @stmtcachesize.setter + def stmtcachesize(self, value: int): + self._impl.stmtcachesize = value + + @property + def program(self) -> str: + """ + This read-write attribute is a string recorded by Oracle Database + as the program from which the connection originates. This is the value + used in the PROGRAM column of the V$SESSION view. + + This attribute has an initial value that is populated by + `sys.executable `__. + + This attribute is only used in python-oracledb Thin mode. + """ + return self._impl.program + + @program.setter + def program(self, value: str): + if base_impl.sanitize(value) != value: + errors._raise_err(errors.ERR_INVALID_NETWORK_NAME, name="program") + self._impl.program = value + + @property + def machine(self) -> str: + """ + This read-write attribute is a string recorded by Oracle Database as + the name of machine from which the connection originates. This is the + value used in the MACHINE column of the V$SESSION view. + + This attribute takes the host name where the application is running as + its initial value. + + This attribute is only used in python-oracledb Thin mode. + """ + return self._impl.machine + + @machine.setter + def machine(self, value: str): + if base_impl.sanitize(value) != value: + errors._raise_err(errors.ERR_INVALID_NETWORK_NAME, name="machine") + self._impl.machine = value + + @property + def terminal(self) -> str: + """ + This read-write attribute specifies the terminal identifier from which + the connection originates. This is the value used in the TERMINAL + column of the V$SESSION view. + + This attribute has an initial value of "unknown". + + This attribute is only used in python-oracledb Thin mode. + """ + return self._impl.terminal + + @terminal.setter + def terminal(self, value: str): + self._impl.terminal = value + + @property + def osuser(self) -> str: + """ + This read-write attribute is a string recorded by Oracle Database + as the operating system user who originated the connection. This is the + value used in the OSUSER column of the V$SESSION view. + + This attribute takes the login name of the user as its initial value. + + This attribute is only used in python-oracledb Thin mode. + """ + return self._impl.osuser + + @osuser.setter + def osuser(self, value: str): + if base_impl.sanitize(value) != value: + errors._raise_err(errors.ERR_INVALID_NETWORK_NAME, name="osuser") + self._impl.osuser = value + + @property + def driver_name(self) -> str: + """ + This read-write attribute is a string recorded by Oracle Database + as the name of the driver which originated the connection. This is the + value used in the CLIENT_DRIVER column of the V$SESSION_CONNECT_INFO + view. + + This attribute has an initial value of *None*. It is used as required + in python-oracledb Thick and Thin mode. + + In python-oracledb Thick mode, this attribute is used if the + ``driver_name`` parameter is not specified in + :meth:`oracledb.init_oracle_client()`. In Thin mode, this attribute is + used if the ``driver_name`` parameter is not specified in + :meth:`oracledb.connect()`, :meth:`oracledb.connect_async()`, + :meth:`oracledb.create_pool()`, or + :meth:`oracledb.create_pool_async()`. 
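For example, these connection-metadata defaults can be set before connecting
(a sketch; the strings are arbitrary illustrations)::

    import oracledb

    oracledb.defaults.program = "sales_report"
    oracledb.defaults.machine = "webhost01"
    oracledb.defaults.osuser = "batch"
    oracledb.defaults.driver_name = "sales_app : 1.0"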
If the value of this attribute is + *None*, the value set when connecting in python-oracledb Thick mode is + like "python-oracledb thk : " and in Thin mode is like + "python-oracledb thn : ". + """ + return self._impl.driver_name + + @driver_name.setter + def driver_name(self, value: str): + self._impl.driver_name = value + + @property + def thick_mode_dsn_passthrough(self) -> bool: + """ + This read-write attribute determines whether + connection strings passed as the ``dsn`` parameter to + :meth:`oracledb.connect()`, :meth:`oracledb.create_pool()`, + :meth:`oracledb.connect_async()`, and + :meth:`oracledb.create_pool_async()` in python-oracledb Thick mode will + be parsed by Oracle Client libraries or by python-oracledb itself. + + The value of ``thick_mode_dsn_passthrough`` is ignored in + python-oracledb Thin mode, which always parses all connect strings + (including reading a tnsnames.ora file, if required). + + This attribute has an initial value of *True*. + """ + return self._impl.thick_mode_dsn_passthrough + + @thick_mode_dsn_passthrough.setter + def thick_mode_dsn_passthrough(self, value: str): + self._impl.thick_mode_dsn_passthrough = value + + +defaults = Defaults() diff --git a/.venv/lib/python3.9/site-packages/oracledb/driver_mode.py b/.venv/lib/python3.9/site-packages/oracledb/driver_mode.py new file mode 100644 index 0000000..15c93dc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/driver_mode.py @@ -0,0 +1,140 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025 Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# driver_mode.py +# +# Contains a simple method for checking and returning which mode the driver is +# currently using. The driver only supports creating connections and pools with +# either the thin implementation or the thick implementation, not both +# simultaneously. +# ----------------------------------------------------------------------------- + +import threading + +from . import errors + + +# The DriverModeHandler class is used to manage which mode the driver is using. 
+# +# The "thin_mode" flag contains the current state: +# None: neither thick nor thin implementation has been used yet +# False: thick implementation is being used +# True: thin implementation is being used +# +# The "requested_thin_mode" flag is set to the mode that is being requested: +# False: thick implementation is being initialized +# True: thin implementation is being initialized +class DriverModeManager: + """ + Manages the mode the driver is using. The "thin_mode" flag contains the + current state: + None: neither thick nor thin implementation has been used yet + False: thick implementation is being used + True: thin implementation is being used + The "requested_thin_mode" is set to the mode that is being requested, but + only while initialization is taking place (otherwise, it contains the value + None): + False: thick implementation is being initialized + True: thin implementation is being initialized + The condition is used to ensure that only one thread is performing + initialization. + """ + + def __init__(self): + self.thin_mode = None + self.requested_thin_mode = None + self.condition = threading.Condition() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + with self.condition: + if ( + exc_type is None + and exc_value is None + and exc_tb is None + and self.requested_thin_mode is not None + ): + self.thin_mode = self.requested_thin_mode + self.requested_thin_mode = None + self.condition.notify() + + @property + def thin(self): + if self.requested_thin_mode is not None: + return self.requested_thin_mode + return self.thin_mode + + +manager = DriverModeManager() + + +def get_manager(requested_thin_mode=None): + """ + Returns the manager, but only after ensuring that no other threads are + attempting to initialize the mode. + """ + with manager.condition: + if manager.thin_mode is None: + if manager.requested_thin_mode is not None: + manager.condition.wait() + if manager.thin_mode is None: + if requested_thin_mode is None: + manager.requested_thin_mode = True + else: + manager.requested_thin_mode = requested_thin_mode + elif ( + requested_thin_mode is not None + and requested_thin_mode != manager.thin_mode + ): + if requested_thin_mode: + errors._raise_err(errors.ERR_THICK_MODE_ENABLED) + else: + errors._raise_err(errors.ERR_THIN_CONNECTION_ALREADY_CREATED) + return manager + + +def is_thin_mode() -> bool: + """ + Returns a boolean indicating if python-oracledb is in Thin mode. + + Immediately after python-oracledb is imported, this function will return + *True* indicating that python-oracledb defaults to Thin mode. If a call to + :func:`oracledb.init_oracle_client()` returns successfully, then a + subsequent call to ``is_thin_mode()`` will return False indicating that + Thick mode is enabled. Once the first standalone connection or connection + pool is created, or a successful call to ``oracledb.init_oracle_client()`` + is made, or :meth:`oracledb.enable_thin_mode()` is called, then + python-oracledb’s mode is fixed and the value returned by + ``is_thin_mode()`` will never change for the lifetime of the process. + + The attribute :attr:`Connection.thin` can be used to check a connection's + mode. The attribute :attr:`ConnectionPool.thin` can be used to check a + pool's mode. 
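A minimal usage sketch (the connect arguments are placeholders)::

    import oracledb

    print(oracledb.is_thin_mode())    # True immediately after import

    connection = oracledb.connect(user="hr", password=pw, dsn="localhost/orclpdb1")
    print(oracledb.is_thin_mode())    # still True; the mode is now fixed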
+ """ + if manager.thin_mode is not None: + return manager.thin_mode + return True diff --git a/.venv/lib/python3.9/site-packages/oracledb/dsn.py b/.venv/lib/python3.9/site-packages/oracledb/dsn.py new file mode 100644 index 0000000..13bb31c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/dsn.py @@ -0,0 +1,81 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# dsn.py +# +# Contains makedsn(), a method available for backwards compatibility with +# cx_Oracle. Use of the ConnectParams class or the keyword arguments to +# connect() and create_pool() is recommended instead. +# ----------------------------------------------------------------------------- + +from . import errors + + +def _check_arg(name: str, value: str) -> None: + """ + Checks the argument to ensure that it does not contain (, ) or = as these + characters are not permitted within connect strings. + """ + if "(" in value or ")" in value or "=" in value: + errors._raise_err(errors.ERR_INVALID_MAKEDSN_ARG, name=name) + + +def makedsn( + host: str, + port: int, + sid: str = None, + service_name: str = None, + region: str = None, + sharding_key: str = None, + super_sharding_key: str = None, +) -> str: + """ + Returns a string suitable for use as the ``dsn`` parameter for + :meth:`~oracledb.connect()`. This string is identical to the strings that + are defined by the Oracle names server or defined in the ``tnsnames.ora`` + file. 
+ """ + connect_data_parts = [] + _check_arg("host", host) + if service_name is not None: + _check_arg("service_name", service_name) + connect_data_parts.append(f"(SERVICE_NAME={service_name})") + elif sid is not None: + _check_arg("sid", sid) + connect_data_parts.append(f"(SID={sid})") + if region is not None: + _check_arg("region", region) + connect_data_parts.append(f"(REGION={region})") + if sharding_key is not None: + _check_arg("sharding_key", sharding_key) + connect_data_parts.append(f"(SHARDING_KEY={sharding_key})") + if super_sharding_key is not None: + _check_arg("super_sharding_key", super_sharding_key) + connect_data_parts.append(f"(SUPER_SHARDING_KEY={super_sharding_key})") + connect_data = "".join(connect_data_parts) + return ( + f"(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST={host})" + f"(PORT={port}))(CONNECT_DATA={connect_data}))" + ) diff --git a/.venv/lib/python3.9/site-packages/oracledb/enums.py b/.venv/lib/python3.9/site-packages/oracledb/enums.py new file mode 100644 index 0000000..64ad212 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/enums.py @@ -0,0 +1,76 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2024, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# enums.py +# +# Contains the enumerations of various constants used throughout the package. +# ----------------------------------------------------------------------------- + +import enum + +from . 
import base_impl + + +class AuthMode(enum.IntFlag): + DEFAULT = base_impl.AUTH_MODE_DEFAULT + PRELIM = base_impl.AUTH_MODE_PRELIM + SYSASM = base_impl.AUTH_MODE_SYSASM + SYSBKP = base_impl.AUTH_MODE_SYSBKP + SYSDBA = base_impl.AUTH_MODE_SYSDBA + SYSDGD = base_impl.AUTH_MODE_SYSDGD + SYSKMT = base_impl.AUTH_MODE_SYSKMT + SYSOPER = base_impl.AUTH_MODE_SYSOPER + SYSRAC = base_impl.AUTH_MODE_SYSRAC + + +class PipelineOpType(enum.IntFlag): + CALL_FUNC = base_impl.PIPELINE_OP_TYPE_CALL_FUNC + CALL_PROC = base_impl.PIPELINE_OP_TYPE_CALL_PROC + COMMIT = base_impl.PIPELINE_OP_TYPE_COMMIT + EXECUTE = base_impl.PIPELINE_OP_TYPE_EXECUTE + EXECUTE_MANY = base_impl.PIPELINE_OP_TYPE_EXECUTE_MANY + FETCH_ALL = base_impl.PIPELINE_OP_TYPE_FETCH_ALL + FETCH_MANY = base_impl.PIPELINE_OP_TYPE_FETCH_MANY + FETCH_ONE = base_impl.PIPELINE_OP_TYPE_FETCH_ONE + + +class PoolGetMode(enum.IntEnum): + FORCEGET = base_impl.POOL_GETMODE_FORCEGET + NOWAIT = base_impl.POOL_GETMODE_NOWAIT + TIMEDWAIT = base_impl.POOL_GETMODE_TIMEDWAIT + WAIT = base_impl.POOL_GETMODE_WAIT + + +class Purity(enum.IntEnum): + DEFAULT = base_impl.PURITY_DEFAULT + NEW = base_impl.PURITY_NEW + SELF = base_impl.PURITY_SELF + + +class VectorFormat(enum.IntEnum): + BINARY = base_impl.VECTOR_FORMAT_BINARY + FLOAT32 = base_impl.VECTOR_FORMAT_FLOAT32 + FLOAT64 = base_impl.VECTOR_FORMAT_FLOAT64 + INT8 = base_impl.VECTOR_FORMAT_INT8 diff --git a/.venv/lib/python3.9/site-packages/oracledb/errors.py b/.venv/lib/python3.9/site-packages/oracledb/errors.py new file mode 100644 index 0000000..44d406f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/errors.py @@ -0,0 +1,1019 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# errors.py +# +# Contains the _Error class and all of the errors that are raised explicitly by +# the package. Oracle Database errors and ODPI-C errors (when using thick mode) +# are only referenced here if they are transformed into package specific +# errors. +# ----------------------------------------------------------------------------- + +import re + +from .driver_mode import is_thin_mode +from . import exceptions + + +class _Error: + """ + Error class which is used for all errors that are raised by the driver. 
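For illustration, the error object is available as the sole argument of the
exception that is raised, so application code can inspect it (a sketch; the
cursor and SQL text are placeholders)::

    try:
        cursor.execute("select * from nonexistent_table")
    except oracledb.DatabaseError as exc:
        (error,) = exc.args
        print(error.full_code, error.code, error.isrecoverable)
        print(error.message)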
+ """ + + def __init__( + self, + message: str = None, + context: str = None, + isrecoverable: bool = False, + iswarning: bool = False, + code: int = 0, + offset: int = 0, + ) -> None: + self.message = message + self.context = context + self.isrecoverable = isrecoverable + self.iswarning = iswarning + self.code = code + self.offset = offset + self.is_session_dead = False + self.full_code = "" + self.exc_type = exceptions.DatabaseError + self._make_adjustments() + + def _make_adjustments(self): + """ + Make adjustments to the error, if needed, and calculate the full_code + attribute. + """ + if self.message is not None: + pos = self.message.find(":") + if pos > 0: + self.full_code = self.message[:pos] + + # add Oracle Database Error Help Portal URL for database error + # messages, but only in thin mode since this is done + # automatically in thick mode with Oracle Client version 23 and + # higher + if ( + self.code != 0 + and (self.code < 20000 or self.code >= 21000) + and is_thin_mode() + ): + self.message = ( + self.message + + "\n" + + "Help: https://docs.oracle.com/error-help/db/ora-" + + f"{self.code:05}/" + ) + elif self.full_code in ERR_TROUBLESHOOTING_AVAILABLE: + self.message = ( + self.message + + "\n" + + "Help: https://python-oracledb.readthedocs.io/en/" + + "latest/user_guide/troubleshooting.html#" + + self.full_code.lower() + ) + + # transform Oracle and ODPI-C specific error codes to driver errors, + # if applicable + if self.code != 0 or self.full_code.startswith("DPI-"): + args = {} + if self.code != 0: + driver_error_info = ERR_ORACLE_ERROR_XREF.get(self.code) + else: + error_num = int(self.full_code[4:]) + driver_error_info = ERR_DPI_ERROR_XREF.get(error_num) + if driver_error_info is not None: + if isinstance(driver_error_info, tuple): + driver_error_num, pattern = driver_error_info + match = re.search(pattern, self.message) + args = {} if match is None else match.groupdict() + else: + driver_error_num = driver_error_info + driver_error = _get_error_text(driver_error_num, **args) + self.message = f"{driver_error}\n{self.message}" + self.full_code = f"{ERR_PREFIX}-{driver_error_num:04}" + + # determine exception class to use when raising this error + # also determine whether error is recoverable and whether the session + # is deemed "dead" + if self.full_code.startswith("DPY-"): + driver_error_num = int(self.full_code[4:]) + if driver_error_num == ERR_CONNECTION_CLOSED: + self.is_session_dead = self.isrecoverable = True + self.exc_type = ERR_EXCEPTION_TYPES[driver_error_num // 1000] + elif self.code != 0: + if self.code in ERR_RECOVERABLE_ERROR_CODES: + self.isrecoverable = True + if self.code in ERR_INTEGRITY_ERROR_CODES: + self.exc_type = exceptions.IntegrityError + elif self.code in ERR_INTERFACE_ERROR_CODES: + self.exc_type = exceptions.InterfaceError + elif self.code in ERR_OPERATIONAL_ERROR_CODES: + self.exc_type = exceptions.OperationalError + + def __str__(self): + return self.message + + +def _get_error_text(error_num: int, **args) -> str: + """ + Return the error text for the driver specific error number. 
+ """ + message_format = ERR_MESSAGE_FORMATS.get(error_num) + if message_format is None: + message_format = "missing error {error_num}" + args = dict(error_num=error_num) + error_num = ERR_MISSING_ERROR + try: + message = message_format.format(**args) + except KeyError: + message = ( + message_format + + "\nWrong arguments to message format:\n" + + repr(args) + ) + return f"{ERR_PREFIX}-{error_num:04}: {message}" + + +def _create_err( + error_num: int, + context_error_message: str = None, + cause: Exception = None, + **args, +) -> _Error: + """ + Returns a driver specific error object for the specified error number and + supplied arguments. + """ + message = _get_error_text(error_num, **args) + if context_error_message is None and cause is not None: + context_error_message = str(cause) + if context_error_message is not None: + message = f"{message}\n{context_error_message}" + return _Error(message) + + +def _create_warning(error_num: int, **args) -> _Error: + """ + Returns a warning error object for the specified error number and supplied + arguments. + """ + message = _get_error_text(error_num, **args) + return _Error(message, iswarning=True) + + +def _raise_err( + error_num: int, + context_error_message: str = None, + cause: Exception = None, + **args, +) -> None: + """ + Raises a driver specific exception from the specified error number and + supplied arguments. + """ + error = _create_err(error_num, context_error_message, cause, **args) + raise error.exc_type(error) from cause + + +def _raise_not_supported(feature: str) -> None: + """ + Raises an exception that the specified feature is not supported. This is + used as the default implementation of all functions for the implementation + objects. + """ + driver_type = "thick" if is_thin_mode() else "thin" + _raise_err( + ERR_FEATURE_NOT_SUPPORTED, feature=feature, driver_type=driver_type + ) + + +# prefix used for all error messages +ERR_PREFIX = "DPY" + +# error numbers that result in InterfaceError +ERR_MISSING_ERROR = 1000 +ERR_NOT_CONNECTED = 1001 +ERR_POOL_NOT_OPEN = 1002 +ERR_NOT_A_QUERY = 1003 +ERR_NO_STATEMENT_EXECUTED = 1004 +ERR_POOL_HAS_BUSY_CONNECTIONS = 1005 +ERR_CURSOR_NOT_OPEN = 1006 + +# error numbers that result in ProgrammingError +ERR_MESSAGE_HAS_NO_PAYLOAD = 2000 +ERR_NO_STATEMENT = 2001 +ERR_NO_STATEMENT_PREPARED = 2002 +ERR_WRONG_EXECUTE_PARAMETERS_TYPE = 2003 +ERR_WRONG_EXECUTEMANY_PARAMETERS_TYPE = 2004 +ERR_ARGS_AND_KEYWORD_ARGS = 2005 +ERR_MIXED_POSITIONAL_AND_NAMED_BINDS = 2006 +ERR_EXPECTING_TYPE = 2007 +ERR_WRONG_OBJECT_TYPE = 2008 +ERR_WRONG_SCROLL_MODE = 2009 +ERR_MIXED_ELEMENT_TYPES = 2010 +ERR_WRONG_ARRAY_DEFINITION = 2011 +ERR_ARGS_MUST_BE_LIST_OR_TUPLE = 2012 +ERR_KEYWORD_ARGS_MUST_BE_DICT = 2013 +ERR_DUPLICATED_PARAMETER = 2014 +ERR_EXPECTING_VAR = 2015 +ERR_INCORRECT_VAR_ARRAYSIZE = 2016 +ERR_LIBRARY_ALREADY_INITIALIZED = 2017 +ERR_WALLET_FILE_MISSING = 2018 +ERR_THIN_CONNECTION_ALREADY_CREATED = 2019 +ERR_INVALID_MAKEDSN_ARG = 2020 +ERR_INIT_ORACLE_CLIENT_NOT_CALLED = 2021 +ERR_INVALID_OCI_ATTR_TYPE = 2022 +ERR_INVALID_CONN_CLASS = 2023 +ERR_INVALID_CONNECT_PARAMS = 2025 +ERR_INVALID_POOL_CLASS = 2026 +ERR_INVALID_POOL_PARAMS = 2027 +ERR_EXPECTING_LIST_FOR_ARRAY_VAR = 2028 +ERR_HTTPS_PROXY_REQUIRES_TCPS = 2029 +ERR_INVALID_LOB_OFFSET = 2030 +ERR_INVALID_ACCESS_TOKEN_PARAM = 2031 +ERR_INVALID_ACCESS_TOKEN_RETURNED = 2032 +ERR_EXPIRED_ACCESS_TOKEN = 2033 +ERR_ACCESS_TOKEN_REQUIRES_TCPS = 2034 +ERR_INVALID_OBJECT_TYPE_NAME = 2035 +ERR_OBJECT_IS_NOT_A_COLLECTION = 2036 
+ERR_MISSING_TYPE_NAME_FOR_OBJECT_VAR = 2037 +ERR_INVALID_COLL_INDEX_GET = 2038 +ERR_INVALID_COLL_INDEX_SET = 2039 +ERR_EXECUTE_MODE_ONLY_FOR_DML = 2040 +ERR_MISSING_ENDING_SINGLE_QUOTE = 2041 +ERR_MISSING_ENDING_DOUBLE_QUOTE = 2042 +ERR_DBOBJECT_ATTR_MAX_SIZE_VIOLATED = 2043 +ERR_DBOBJECT_ELEMENT_MAX_SIZE_VIOLATED = 2044 +ERR_INVALID_ARRAYSIZE = 2045 +ERR_CURSOR_HAS_BEEN_CLOSED = 2046 +ERR_INVALID_LOB_AMOUNT = 2047 +ERR_DML_RETURNING_DUP_BINDS = 2048 +ERR_MISSING_ADDRESS = 2049 +ERR_INVALID_TPC_BEGIN_FLAGS = 2050 +ERR_INVALID_TPC_END_FLAGS = 2051 +ERR_MISMATCHED_TOKEN = 2052 +ERR_THICK_MODE_ENABLED = 2053 +ERR_NAMED_POOL_MISSING = 2054 +ERR_NAMED_POOL_EXISTS = 2055 +ERR_PROTOCOL_HANDLER_FAILED = 2056 +ERR_PASSWORD_TYPE_HANDLER_FAILED = 2057 +ERR_PLAINTEXT_PASSWORD_IN_CONFIG = 2058 +ERR_MISSING_CONNECT_DESCRIPTOR = 2059 +ERR_ARROW_C_API_ERROR = 2060 +ERR_PARAMS_HOOK_HANDLER_FAILED = 2061 +ERR_PAYLOAD_CANNOT_BE_ENQUEUED = 2062 +ERR_SCROLL_OUT_OF_RESULT_SET = 2063 +ERR_POOL_MAX_LESS_THAN_MIN = 2064 +ERR_ARROW_SPARSE_VECTOR_NOT_ALLOWED = 2065 +ERR_EMPTY_STATEMENT = 2066 +ERR_WRONG_DIRECT_PATH_DATA_TYPE = 2067 +ERR_SCROLL_NOT_SUPPORTED = 2068 +ERR_WRONG_REQUESTED_SCHEMA_LENGTH = 2069 + +# error numbers that result in NotSupportedError +ERR_TIME_NOT_SUPPORTED = 3000 +ERR_FEATURE_NOT_SUPPORTED = 3001 +ERR_PYTHON_VALUE_NOT_SUPPORTED = 3002 +ERR_PYTHON_TYPE_NOT_SUPPORTED = 3003 +ERR_UNSUPPORTED_TYPE_SET = 3004 +ERR_ARRAYS_OF_ARRAYS = 3005 +ERR_ORACLE_TYPE_NOT_SUPPORTED = 3006 +ERR_DB_TYPE_NOT_SUPPORTED = 3007 +ERR_UNSUPPORTED_INBAND_NOTIFICATION = 3008 +ERR_SELF_BIND_NOT_SUPPORTED = 3009 +ERR_SERVER_VERSION_NOT_SUPPORTED = 3010 +ERR_NCHAR_CS_NOT_SUPPORTED = 3012 +ERR_UNSUPPORTED_PYTHON_TYPE_FOR_DB_TYPE = 3013 +ERR_LOB_OF_WRONG_TYPE = 3014 +ERR_UNSUPPORTED_VERIFIER_TYPE = 3015 +ERR_NO_CRYPTOGRAPHY_PACKAGE = 3016 +ERR_ORACLE_TYPE_NAME_NOT_SUPPORTED = 3017 +ERR_TDS_TYPE_NOT_SUPPORTED = 3018 +ERR_OSON_NODE_TYPE_NOT_SUPPORTED = 3019 +ERR_OSON_FIELD_NAME_LIMITATION = 3020 +ERR_OSON_VERSION_NOT_SUPPORTED = 3021 +ERR_NAMED_TIMEZONE_NOT_SUPPORTED = 3022 +ERR_VECTOR_VERSION_NOT_SUPPORTED = 3023 +ERR_VECTOR_FORMAT_NOT_SUPPORTED = 3024 +ERR_OPERATION_NOT_SUPPORTED_ON_BFILE = 3025 +ERR_OPERATION_ONLY_SUPPORTED_ON_BFILE = 3026 +ERR_CURSOR_DIFF_CONNECTION = 3027 +ERR_UNSUPPORTED_PIPELINE_OPERATION = 3028 +ERR_INVALID_NETWORK_NAME = 3029 +ERR_ARROW_UNSUPPORTED_DATA_TYPE = 3030 +ERR_ARROW_UNSUPPORTED_VECTOR_FORMAT = 3031 +ERR_ARROW_UNSUPPORTED_DATA_FORMAT = 3032 +ERR_ARROW_UNSUPPORTED_CHILD_DATA_FORMAT = 3033 +ERR_SESSIONLESS_DIFFERING_METHODS = 3034 +ERR_SESSIONLESS_ALREADY_ACTIVE = 3035 +ERR_SESSIONLESS_INACTIVE = 3036 +ERR_UNSUPPORTED_ARROW_TYPE = 3037 +ERR_CANNOT_CONVERT_TO_ARROW_TYPE = 3038 +ERR_CANNOT_CONVERT_FROM_ARROW_TYPE = 3039 + +# error numbers that result in DatabaseError +ERR_TNS_ENTRY_NOT_FOUND = 4000 +ERR_NO_CREDENTIALS = 4001 +ERR_COLUMN_TRUNCATED = 4002 +ERR_ORACLE_NUMBER_NO_REPR = 4003 +ERR_INVALID_NUMBER = 4004 +ERR_POOL_NO_CONNECTION_AVAILABLE = 4005 +ERR_ARRAY_DML_ROW_COUNTS_NOT_ENABLED = 4006 +ERR_INCONSISTENT_DATATYPES = 4007 +ERR_INVALID_BIND_NAME = 4008 +ERR_WRONG_NUMBER_OF_POSITIONAL_BINDS = 4009 +ERR_MISSING_BIND_VALUE = 4010 +ERR_CONNECTION_CLOSED = 4011 +ERR_NUMBER_WITH_INVALID_EXPONENT = 4012 +ERR_NUMBER_STRING_OF_ZERO_LENGTH = 4013 +ERR_NUMBER_STRING_TOO_LONG = 4014 +ERR_NUMBER_WITH_EMPTY_EXPONENT = 4015 +ERR_CONTENT_INVALID_AFTER_NUMBER = 4016 +ERR_INVALID_CONNECT_DESCRIPTOR = 4017 +ERR_CANNOT_PARSE_CONNECT_STRING = 4018 +ERR_INVALID_REDIRECT_DATA = 4019 +ERR_INVALID_PROTOCOL = 
4021 +ERR_INVALID_ENUM_VALUE = 4022 +ERR_CALL_TIMEOUT_EXCEEDED = 4024 +ERR_INVALID_REF_CURSOR = 4025 +ERR_MISSING_FILE = 4026 +ERR_NO_CONFIG_DIR = 4027 +ERR_INVALID_SERVER_TYPE = 4028 +ERR_TOO_MANY_BATCH_ERRORS = 4029 +ERR_IFILE_CYCLE_DETECTED = 4030 +ERR_INVALID_VECTOR = 4031 +ERR_INVALID_SSL_VERSION = 4032 +ERR_EXCEEDED_IDLE_TIME = 4033 +ERR_INVALID_PASSWORD_TYPE = 4034 +ERR_INVALID_SERVER_RESPONSE = 4035 +ERR_CANNOT_CONVERT_TO_ARROW_INTEGER = 4036 +ERR_CANNOT_CONVERT_TO_ARROW_DOUBLE = 4037 +ERR_INVALID_INTEGER = 4038 +ERR_CANNOT_CONVERT_TO_ARROW_FLOAT = 4039 +ERR_ARROW_FIXED_SIZE_BINARY_VIOLATED = 4040 +ERR_DPL_TOO_MUCH_DATA = 4041 + +# error numbers that result in InternalError +ERR_MESSAGE_TYPE_UNKNOWN = 5000 +ERR_BUFFER_LENGTH_INSUFFICIENT = 5001 +ERR_INTEGER_TOO_LARGE = 5002 +ERR_UNEXPECTED_NEGATIVE_INTEGER = 5003 +ERR_UNEXPECTED_DATA = 5004 +ERR_UNEXPECTED_REFUSE = 5005 +ERR_UNEXPECTED_END_OF_DATA = 5006 +ERR_UNEXPECTED_XML_TYPE = 5007 +ERR_UNKNOWN_SERVER_PIGGYBACK = 5009 +ERR_UNKNOWN_TRANSACTION_STATE = 5010 +ERR_UNEXPECTED_PIPELINE_FAILURE = 5011 +ERR_NOT_IMPLEMENTED = 5012 +ERR_INTERNAL_CREATION_REQUIRED = 5013 +ERR_UNKNOWN_TRANSACTION_SYNC_VERSION = 5014 + +# error numbers that result in OperationalError +ERR_LISTENER_REFUSED_CONNECTION = 6000 +ERR_INVALID_SERVICE_NAME = 6001 +ERR_INVALID_SERVER_CERT_DN = 6002 +ERR_INVALID_SID = 6003 +ERR_PROXY_FAILURE = 6004 +ERR_CONNECTION_FAILED = 6005 +ERR_INVALID_SERVER_NAME = 6006 + +# error numbers that result in Warning +WRN_COMPILATION_ERROR = 7000 + +# error numbers that result in DataError +ERR_VALUE_TOO_LARGE = 8000 +ERR_NULLS_NOT_ALLOWED = 8001 +ERR_ARROW_DATA_STRUCTURE_OVERFLOW = 8002 + +# Oracle error number cross reference +ERR_ORACLE_ERROR_XREF = { + 22: ERR_CONNECTION_CLOSED, + 28: ERR_CONNECTION_CLOSED, + 31: ERR_CONNECTION_CLOSED, + 45: ERR_CONNECTION_CLOSED, + 378: ERR_CONNECTION_CLOSED, + 600: ERR_CONNECTION_CLOSED, + 602: ERR_CONNECTION_CLOSED, + 603: ERR_CONNECTION_CLOSED, + 609: ERR_CONNECTION_CLOSED, + 1005: ERR_NO_CREDENTIALS, + 1012: ERR_CONNECTION_CLOSED, + 1041: ERR_CONNECTION_CLOSED, + 1043: ERR_CONNECTION_CLOSED, + 1089: ERR_CONNECTION_CLOSED, + 1092: ERR_CONNECTION_CLOSED, + 1740: ERR_MISSING_ENDING_DOUBLE_QUOTE, + 1756: ERR_MISSING_ENDING_SINGLE_QUOTE, + 2396: ERR_CONNECTION_CLOSED, + 3113: ERR_CONNECTION_CLOSED, + 3114: ERR_CONNECTION_CLOSED, + 3122: ERR_CONNECTION_CLOSED, + 3135: ERR_CONNECTION_CLOSED, + 12153: ERR_CONNECTION_CLOSED, + 12537: ERR_CONNECTION_CLOSED, + 12547: ERR_CONNECTION_CLOSED, + 12570: ERR_CONNECTION_CLOSED, + 12583: ERR_CONNECTION_CLOSED, + 22165: ( + ERR_INVALID_COLL_INDEX_SET, + r"index \[(?P\d+)\] must be in the range of " + r"\[(?P\d+)\] to \[(?P\d+)\]", + ), + 22303: (ERR_INVALID_OBJECT_TYPE_NAME, r'type "(?P[^"]*"."[^"]*)"'), + 24422: ERR_POOL_HAS_BUSY_CONNECTIONS, + 24349: ERR_ARRAY_DML_ROW_COUNTS_NOT_ENABLED, + 24457: ERR_POOL_NO_CONNECTION_AVAILABLE, + 24459: ERR_POOL_NO_CONNECTION_AVAILABLE, + 24496: ERR_POOL_NO_CONNECTION_AVAILABLE, + 24338: ERR_INVALID_REF_CURSOR, + 24344: WRN_COMPILATION_ERROR, + 26202: ERR_SESSIONLESS_INACTIVE, + 26211: ERR_SESSIONLESS_DIFFERING_METHODS, + 26216: ERR_SESSIONLESS_ALREADY_ACTIVE, + 27146: ERR_CONNECTION_CLOSED, + 28511: ERR_CONNECTION_CLOSED, + 38902: ERR_TOO_MANY_BATCH_ERRORS, + 56600: ERR_CONNECTION_CLOSED, +} + +# ODPI-C error number cross reference +ERR_DPI_ERROR_XREF = { + 1010: ERR_NOT_CONNECTED, + 1024: (ERR_INVALID_COLL_INDEX_GET, r"at index (?P\d+) does"), + 1027: ERR_SCROLL_OUT_OF_RESULT_SET, + 1043: ERR_INVALID_NUMBER, + 1044: 
ERR_ORACLE_NUMBER_NO_REPR, + 1063: ERR_EXECUTE_MODE_ONLY_FOR_DML, + 1067: (ERR_CALL_TIMEOUT_EXCEEDED, r"call timeout of (?P\d+) ms"), + 1080: ERR_CONNECTION_CLOSED, +} + +# Oracle error codes that result in IntegrityError exceptions +ERR_INTEGRITY_ERROR_CODES = [ + 1, # unique constraint violated + 1400, # cannot insert NULL + 1438, # value larger than specified precision + 2290, # check constraint violated + 2291, # integrity constraint violated - parent key not found + 2292, # integrity constraint violated - child record found + 21525, # attribute or collection element violated its constraints + 40479, # internal JSON serializer error +] + +# Oracle error codes that result in InterfaceError exceptions +ERR_INTERFACE_ERROR_CODES = [ + 24422, # error occurred while trying to destroy the Session Pool +] + +# Oracle error codes that result in OperationalError exceptions +ERR_OPERATIONAL_ERROR_CODES = [ + 22, # invalid session ID; access denied + 378, # buffer pools cannot be created as specified + 600, # internal error code + 602, # internal programming exception + 603, # ORACLE server session terminated by fatal error + 604, # error occurred at recursive SQL level + 609, # could not attach to incoming connection + 1012, # not logged on + 1013, # user requested cancel of current operation + 1033, # ORACLE initialization or shutdown in progress + 1034, # ORACLE not available + 1041, # internal error. hostdef extension doesn't exist + 1043, # user side memory corruption + 1089, # immediate shutdown or close in progress + 1090, # shutdown in progress - connection is not permitted + 1092, # ORACLE instance terminated. Disconnection forced + 3111, # break received on communication channel + 3113, # end-of-file on communication channel + 3114, # not connected to ORACLE + 3122, # attempt to close ORACLE-side window on user side + 3135, # connection lost contact + 12153, # TNS:not connected + 12203, # TNS:unable to connect to destination + 12500, # TNS:listener failed to start a dedicated server process + 12571, # TNS:packet writer failure + 27146, # post/wait initialization failed + 28511, # lost RPC connection to heterogeneous remote agent +] + +# Oracle error codes that are deemed recoverable +# NOTE: this does not include the errors that are mapped to +# ERR_CONNECTION_CLOSED since those are all deemed recoverable +ERR_RECOVERABLE_ERROR_CODES = [ + 376, # file %s cannot be read at this time + 1033, # ORACLE initialization or shutdown in progress + 1034, # the Oracle instance is not available for use + 1090, # shutdown in progress + 1115, # IO error reading block from file %s (block # %s) + 12514, # Service %s is not registered with the listener + 12571, # TNS:packet writer failure + 12757, # instance does not currently know of requested service + 16456, # missing or invalid value +] + +# driver error message exception types (multiples of 1000) +ERR_EXCEPTION_TYPES = { + 1: exceptions.InterfaceError, + 2: exceptions.ProgrammingError, + 3: exceptions.NotSupportedError, + 4: exceptions.DatabaseError, + 5: exceptions.InternalError, + 6: exceptions.OperationalError, + 7: exceptions.Warning, + 8: exceptions.DataError, +} + +# error messages that have a troubleshooting section available +ERR_TROUBLESHOOTING_AVAILABLE = set( + [ + "DPI-1047", # Oracle Client library cannot be loaded + "DPI-1072", # Oracle Client library version is unsupported + "DPY-3010", # connections to Oracle Database version not supported + "DPY-3015", # password verifier type is not supported + "DPY-4011", # the database or 
network closed the connection + ] +) + +# error message formats +ERR_MESSAGE_FORMATS = { + ERR_ACCESS_TOKEN_REQUIRES_TCPS: ( + "access_token requires use of the tcps protocol" + ), + ERR_ARGS_AND_KEYWORD_ARGS: ( + "expecting positional arguments or keyword arguments, not both" + ), + ERR_ARGS_MUST_BE_LIST_OR_TUPLE: "arguments must be a list or tuple", + ERR_ARRAY_DML_ROW_COUNTS_NOT_ENABLED: ( + "array DML row counts mode is not enabled" + ), + ERR_ARRAYS_OF_ARRAYS: "arrays of arrays are not supported", + ERR_ARROW_C_API_ERROR: ( + "Apache Arrow C Data Interface operation failed with error code {code}" + ), + ERR_ARROW_DATA_STRUCTURE_OVERFLOW: ( + "Apache Arrow C Data structure overflow detected. A larger structure " + "is needed." + ), + ERR_ARROW_FIXED_SIZE_BINARY_VIOLATED: ( + "value of length {actual_len} does not match the Apache Arrow fixed " + "size binary length of {fixed_size_len}" + ), + ERR_ARROW_SPARSE_VECTOR_NOT_ALLOWED: ( + "Apache Arrow format does not support sparse vectors with flexible " + "dimensions" + ), + ERR_ARROW_UNSUPPORTED_CHILD_DATA_FORMAT: ( + "conversion from Apache Arrow list with child format " + '"{schema_format}" to Oracle Database vector is not supported' + ), + ERR_ARROW_UNSUPPORTED_DATA_FORMAT: ( + 'conversion from Apache Arrow format "{schema_format}" to Oracle ' + "Database is not supported" + ), + ERR_ARROW_UNSUPPORTED_DATA_TYPE: ( + "conversion from Oracle Database type {db_type_name} to Apache " + "Arrow format is not supported" + ), + ERR_ARROW_UNSUPPORTED_VECTOR_FORMAT: ( + "flexible vector formats are not supported. Only fixed 'FLOAT32', " + "'FLOAT64', 'INT8' or 'BINARY' formats are supported" + ), + ERR_BUFFER_LENGTH_INSUFFICIENT: ( + "internal error: buffer of length {actual_buffer_len} " + "insufficient to hold {required_buffer_len} bytes" + ), + ERR_CALL_TIMEOUT_EXCEEDED: "call timeout of {timeout} ms exceeded", + ERR_CANNOT_CONVERT_FROM_ARROW_TYPE: ( + 'Apache Arrow type "{arrow_type}" cannot be converted to database ' + 'type "{db_type}"' + ), + ERR_CANNOT_CONVERT_TO_ARROW_DOUBLE: ( + "{value} cannot be converted to an Apache Arrow double" + ), + ERR_CANNOT_CONVERT_TO_ARROW_FLOAT: ( + "{value} cannot be converted to an Apache Arrow float" + ), + ERR_CANNOT_CONVERT_TO_ARROW_INTEGER: ( + "{value} cannot be converted to an Apache Arrow integer" + ), + ERR_CANNOT_CONVERT_TO_ARROW_TYPE: ( + 'database type "{db_type}" cannot be converted to Apache Arrow type ' + '"{arrow_type}"' + ), + ERR_CANNOT_PARSE_CONNECT_STRING: 'cannot parse connect string "{data}"', + ERR_COLUMN_TRUNCATED: ( + "column truncated to {col_value_len} {unit}. " + "Untruncated was {actual_len}" + ), + ERR_CONNECTION_CLOSED: "the database or network closed the connection", + ERR_CONNECTION_FAILED: ( + "cannot connect to database (CONNECTION_ID={connection_id})." 
+ ), + ERR_CONTENT_INVALID_AFTER_NUMBER: "invalid number (content after number)", + ERR_CURSOR_DIFF_CONNECTION: ( + "binding a cursor from a different connection is not supported" + ), + ERR_CURSOR_HAS_BEEN_CLOSED: "cursor has been closed by the database", + ERR_CURSOR_NOT_OPEN: "cursor is not open", + ERR_DBOBJECT_ATTR_MAX_SIZE_VIOLATED: ( + "attribute {attr_name} of type {type_name} exceeds its maximum size " + "(actual: {actual_size}, maximum: {max_size})" + ), + ERR_DBOBJECT_ELEMENT_MAX_SIZE_VIOLATED: ( + "element {index} of type {type_name} exceeds its maximum size " + "(actual: {actual_size}, maximum: {max_size})" + ), + ERR_DB_TYPE_NOT_SUPPORTED: 'database type "{name}" is not supported', + ERR_DML_RETURNING_DUP_BINDS: ( + 'the bind variable placeholder ":{name}" cannot be used both before ' + "and after the RETURNING clause in a DML RETURNING statement" + ), + ERR_DPL_TOO_MUCH_DATA: ( + "the maximum size of a Direct Path load has been exceeded" + ), + ERR_DUPLICATED_PARAMETER: ( + '"{deprecated_name}" and "{new_name}" cannot be specified together' + ), + ERR_EMPTY_STATEMENT: ("an empty statement cannot be executed"), + ERR_EXCEEDED_IDLE_TIME: ( + "the database closed the connection because the connection's idle " + "time has been exceeded" + ), + ERR_EXECUTE_MODE_ONLY_FOR_DML: ( + 'parameters "batcherrors" and "arraydmlrowcounts" may only be ' + "true when used with insert, update, delete and merge statements" + ), + ERR_EXPECTING_LIST_FOR_ARRAY_VAR: ( + "expecting list when setting array variables" + ), + ERR_EXPECTING_TYPE: "expected a type", + ERR_EXPECTING_VAR: ( + "type handler should return None or the value returned by a call " + "to cursor.var()" + ), + ERR_EXPIRED_ACCESS_TOKEN: "access token has expired", + ERR_FEATURE_NOT_SUPPORTED: ( + "{feature} is only supported in python-oracledb {driver_type} mode" + ), + ERR_HTTPS_PROXY_REQUIRES_TCPS: ( + "https_proxy requires use of the tcps protocol" + ), + ERR_IFILE_CYCLE_DETECTED: ( + "file '{including_file_name}' includes file '{included_file_name}', " + "which forms a cycle" + ), + ERR_INCONSISTENT_DATATYPES: ( + "cannot convert from data type {input_type} to {output_type}" + ), + ERR_INCORRECT_VAR_ARRAYSIZE: ( + "variable array size of {var_arraysize} is " + "too small (should be at least {required_arraysize})" + ), + ERR_INIT_ORACLE_CLIENT_NOT_CALLED: ( + "init_oracle_client() must be called first" + ), + ERR_INTEGER_TOO_LARGE: ( + "internal error: read integer of length {length} when expecting " + "integer of no more than length {max_length}" + ), + ERR_INTERNAL_CREATION_REQUIRED: "object may not be created directly", + ERR_INVALID_ACCESS_TOKEN_PARAM: ( + "invalid access token: value must be a string (for OAuth), a " + "2-tuple containing the token and private key strings (for IAM), " + "or a callable that returns a string or 2-tuple" + ), + ERR_INVALID_ACCESS_TOKEN_RETURNED: ( + "invalid access token returned from callable: value must be a " + "string (for OAuth) or a 2-tuple containing the token and private " + "key strings (for IAM)" + ), + ERR_INVALID_ARRAYSIZE: "arraysize must be an integer greater than zero", + ERR_INVALID_BIND_NAME: ( + 'no bind placeholder named ":{name}" was found in the SQL text' + ), + ERR_INVALID_CONN_CLASS: "invalid connection class", + ERR_INVALID_CONNECT_DESCRIPTOR: 'invalid connect descriptor "{data}"', + ERR_INVALID_CONNECT_PARAMS: "invalid connection params", + ERR_INVALID_COLL_INDEX_GET: "element at index {index} does not exist", + ERR_INVALID_COLL_INDEX_SET: ( + "given index {index} must 
be in the range of {min_index} to " + "{max_index}" + ), + ERR_INVALID_ENUM_VALUE: "invalid value for enumeration {name}: {value}", + ERR_INVALID_INTEGER: ( + "integer {value} cannot be represented as Apache Arrow type " + "{arrow_type}" + ), + ERR_INVALID_LOB_AMOUNT: "LOB amount must be greater than zero", + ERR_INVALID_LOB_OFFSET: "LOB offset must be greater than zero", + ERR_INVALID_MAKEDSN_ARG: '"{name}" argument contains invalid values', + ERR_INVALID_NETWORK_NAME: ( + '"{name}" includes characters that are not allowed' + ), + ERR_INVALID_NUMBER: "invalid number", + ERR_INVALID_OBJECT_TYPE_NAME: 'invalid object type name: "{name}"', + ERR_INVALID_OCI_ATTR_TYPE: "invalid OCI attribute type {attr_type}", + ERR_INVALID_PASSWORD_TYPE: 'invalid password type "{password_type}"', + ERR_INVALID_POOL_CLASS: "invalid connection pool class", + ERR_INVALID_POOL_PARAMS: "invalid pool params", + ERR_INVALID_PROTOCOL: 'invalid protocol "{protocol}"', + ERR_INVALID_REDIRECT_DATA: "invalid redirect data {data}", + ERR_INVALID_REF_CURSOR: "invalid REF CURSOR: never opened in PL/SQL", + ERR_INVALID_SERVER_CERT_DN: ( + "The distinguished name (DN) on the server certificate does not " + "match the expected value: {expected_dn}" + ), + ERR_INVALID_SERVER_NAME: ( + "The name on the server certificate does not match the expected " + 'value: "{expected_name}"' + ), + ERR_INVALID_SERVER_RESPONSE: ( + "invalid server response to connection request" + ), + ERR_INVALID_SERVER_TYPE: "invalid server_type: {server_type}", + ERR_INVALID_SERVICE_NAME: ( + 'Service "{service_name}" is not registered with the listener at ' + 'host "{host}" port {port}. (Similar to ORA-12514)' + ), + ERR_INVALID_SID: ( + 'SID "{sid}" is not registered with the listener at host "{host}" ' + "port {port}. (Similar to ORA-12505)" + ), + ERR_INVALID_SSL_VERSION: 'invalid value for ssl_version: "{ssl_version}"', + ERR_INVALID_TPC_BEGIN_FLAGS: "invalid flags for tpc_begin()", + ERR_INVALID_TPC_END_FLAGS: "invalid flags for tpc_end()", + ERR_INVALID_VECTOR: "vector cannot contain zero dimensions", + ERR_KEYWORD_ARGS_MUST_BE_DICT: ( + '"keyword_parameters" argument must be a dict' + ), + ERR_LIBRARY_ALREADY_INITIALIZED: ( + "init_oracle_client() was already called with different arguments" + ), + ERR_LISTENER_REFUSED_CONNECTION: ( + "Listener refused connection. 
(Similar to ORA-{error_code})" + ), + ERR_LOB_OF_WRONG_TYPE: ( + "LOB is of type {actual_type_name} but must be of type " + "{expected_type_name}" + ), + ERR_MESSAGE_HAS_NO_PAYLOAD: "message has no payload", + ERR_MESSAGE_TYPE_UNKNOWN: ( + "internal error: unknown protocol message type {message_type} " + "at position {position}" + ), + ERR_MISMATCHED_TOKEN: ( + "internal error: pipeline token number {token_num} does not match " + "expected token number {expected_token_num}" + ), + ERR_MISSING_ADDRESS: ( + "no addresses are defined in connect descriptor: {connect_string}" + ), + ERR_MISSING_BIND_VALUE: ( + 'a bind variable replacement value for placeholder ":{name}" was ' + "not provided" + ), + ERR_MISSING_CONNECT_DESCRIPTOR: ( + '"connect_descriptor" key missing from configuration' + ), + ERR_MISSING_ENDING_DOUBLE_QUOTE: 'missing ending quote (")', + ERR_MISSING_ENDING_SINGLE_QUOTE: "missing ending quote (')", + ERR_MISSING_FILE: "file '{file_name}' is missing or unreadable", + ERR_MISSING_TYPE_NAME_FOR_OBJECT_VAR: ( + "no object type specified for object variable" + ), + ERR_MIXED_ELEMENT_TYPES: ( + "element {element} is not the same data type as previous elements" + ), + ERR_MIXED_POSITIONAL_AND_NAMED_BINDS: ( + "positional and named binds cannot be intermixed" + ), + ERR_NAMED_POOL_EXISTS: ( + 'connection pool with alias "{alias}" already exists' + ), + ERR_NAMED_POOL_MISSING: ( + 'connection pool with alias "{alias}" does not exist' + ), + ERR_NAMED_TIMEZONE_NOT_SUPPORTED: ( + "named time zones are not supported in thin mode" + ), + ERR_NCHAR_CS_NOT_SUPPORTED: ( + "national character set id {charset_id} is not supported by " + "python-oracledb in thin mode" + ), + ERR_NO_CONFIG_DIR: "no configuration directory specified", + ERR_NO_CREDENTIALS: "no credentials specified", + ERR_NO_CRYPTOGRAPHY_PACKAGE: ( + "python-oracledb thin mode cannot be used because the " + "cryptography package cannot be imported" + ), + ERR_NO_STATEMENT: "no statement specified and no prior statement prepared", + ERR_NO_STATEMENT_EXECUTED: "no statement executed", + ERR_NO_STATEMENT_PREPARED: "statement must be prepared first", + ERR_NOT_A_QUERY: "the executed statement does not return rows", + ERR_NOT_CONNECTED: "not connected to database", + ERR_NOT_IMPLEMENTED: "not implemented", + ERR_NULLS_NOT_ALLOWED: ( + 'value for column "{column_name}" may not be null on row {row_num}' + ), + ERR_NUMBER_STRING_OF_ZERO_LENGTH: "invalid number: zero length string", + ERR_NUMBER_STRING_TOO_LONG: "invalid number: string too long", + ERR_NUMBER_WITH_EMPTY_EXPONENT: "invalid number: empty exponent", + ERR_NUMBER_WITH_INVALID_EXPONENT: "invalid number: invalid exponent", + ERR_OBJECT_IS_NOT_A_COLLECTION: "object {name} is not a collection", + ERR_OPERATION_NOT_SUPPORTED_ON_BFILE: ( + "operation is not supported on BFILE LOBs" + ), + ERR_OPERATION_ONLY_SUPPORTED_ON_BFILE: ( + "operation is only supported on BFILE LOBs" + ), + ERR_ORACLE_NUMBER_NO_REPR: ( + "value cannot be represented as an Oracle number" + ), + ERR_ORACLE_TYPE_NAME_NOT_SUPPORTED: ( + 'Oracle data type name "{name}" is not supported' + ), + ERR_ORACLE_TYPE_NOT_SUPPORTED: "Oracle data type {num} is not supported", + ERR_OSON_FIELD_NAME_LIMITATION: ( + "OSON field names may not exceed {max_fname_size} UTF-8 encoded bytes" + ), + ERR_OSON_NODE_TYPE_NOT_SUPPORTED: ( + "OSON node type 0x{node_type:x} is not supported" + ), + ERR_OSON_VERSION_NOT_SUPPORTED: "OSON version {version} is not supported", + ERR_PARAMS_HOOK_HANDLER_FAILED: ( + "registered handler for 
params hook failed" + ), + ERR_PASSWORD_TYPE_HANDLER_FAILED: ( + 'registered handler for password type "{password_type}" failed' + ), + ERR_PAYLOAD_CANNOT_BE_ENQUEUED: ( + "payload cannot be enqueued since it does not match the payload type " + "supported by the queue" + ), + ERR_PLAINTEXT_PASSWORD_IN_CONFIG: ( + "password in configuration must specify a type" + ), + ERR_POOL_HAS_BUSY_CONNECTIONS: ( + "connection pool cannot be closed because connections are busy" + ), + ERR_POOL_MAX_LESS_THAN_MIN: ( + "parameter 'max' should be greater than or equal to parameter 'min'" + ), + ERR_POOL_NO_CONNECTION_AVAILABLE: ( + "timed out waiting for the connection pool to return a connection" + ), + ERR_POOL_NOT_OPEN: "connection pool is not open", + ERR_PROTOCOL_HANDLER_FAILED: ( + 'registered handler for protocol "{protocol}" failed for arg "{arg}"' + ), + ERR_PROXY_FAILURE: "network proxy failed: response was {response}", + ERR_PYTHON_TYPE_NOT_SUPPORTED: "Python type {typ} is not supported", + ERR_PYTHON_VALUE_NOT_SUPPORTED: ( + 'Python value of type "{type_name}" is not supported' + ), + ERR_SCROLL_NOT_SUPPORTED: ( + "scroll operation is not supported on a non-scrollable cursor" + ), + ERR_SCROLL_OUT_OF_RESULT_SET: ( + "scroll operation would go out of the result set" + ), + ERR_SELF_BIND_NOT_SUPPORTED: "binding to self is not supported", + ERR_SERVER_VERSION_NOT_SUPPORTED: ( + "connections to this database server version are not supported " + "by python-oracledb in thin mode" + ), + ERR_SESSIONLESS_ALREADY_ACTIVE: ( + "suspend, commit, or rollback the current active sessionless " + "transaction before beginning or resuming another one" + ), + ERR_SESSIONLESS_DIFFERING_METHODS: ( + "suspending or resuming a Sessionless Transaction can be done with " + "DBMS_TRANSACTION or with python-oracledb, but not both" + ), + ERR_SESSIONLESS_INACTIVE: ("no Sessionless Transaction is active"), + ERR_TDS_TYPE_NOT_SUPPORTED: "Oracle TDS data type {num} is not supported", + ERR_THICK_MODE_ENABLED: ( + "python-oracledb thin mode cannot be used because thick mode has " + "already been enabled" + ), + ERR_THIN_CONNECTION_ALREADY_CREATED: ( + "python-oracledb thick mode cannot be used because thin mode has " + "already been enabled or a thin mode connection has already been " + "created" + ), + ERR_TIME_NOT_SUPPORTED: ( + "Oracle Database does not support time only variables" + ), + ERR_TNS_ENTRY_NOT_FOUND: 'unable to find "{name}" in {file_name}', + ERR_TOO_MANY_BATCH_ERRORS: ( + "the number of batch errors from executemany() exceeds 65535" + ), + ERR_UNEXPECTED_DATA: "unexpected data received: {data}", + ERR_UNEXPECTED_END_OF_DATA: ( + "unexpected end of data: want {num_bytes_wanted} bytes but " + "only {num_bytes_available} bytes are available" + ), + ERR_UNEXPECTED_NEGATIVE_INTEGER: ( + "internal error: read a negative integer when expecting a " + "positive integer" + ), + ERR_UNEXPECTED_REFUSE: ( + "the listener refused the connection but an unexpected error " + "format was returned" + ), + ERR_UNEXPECTED_PIPELINE_FAILURE: "unexpected pipeline failure", + ERR_UNEXPECTED_XML_TYPE: "unexpected XMLType with flag {flag}", + ERR_UNKNOWN_SERVER_PIGGYBACK: ( + "internal error: unknown server side piggyback opcode {opcode}" + ), + ERR_UNKNOWN_TRANSACTION_STATE: ( + "internal error: unknown transaction state {state}" + ), + ERR_UNKNOWN_TRANSACTION_SYNC_VERSION: ( + "internal error: unknown transaction sync version {version}" + ), + ERR_UNSUPPORTED_ARROW_TYPE: 'unsupported Apache Arrow type "{arrow_type}"', + 
ERR_UNSUPPORTED_INBAND_NOTIFICATION: ( + "unsupported in-band notification with error number {err_num}" + ), + ERR_UNSUPPORTED_PIPELINE_OPERATION: ( + "unsupported pipeline operation type: {op_type}" + ), + ERR_UNSUPPORTED_PYTHON_TYPE_FOR_DB_TYPE: ( + "unsupported Python type {py_type_name} for database type " + "{db_type_name}" + ), + ERR_UNSUPPORTED_TYPE_SET: "type {db_type_name} does not support being set", + ERR_UNSUPPORTED_VERIFIER_TYPE: ( + "password verifier type 0x{verifier_type:x} is not supported by " + "python-oracledb in thin mode" + ), + ERR_VALUE_TOO_LARGE: ( + "value of size {actual_size} exeeds maximum allowed size of " + '{max_size} for column "{column_name}" of row {row_num}' + ), + ERR_VECTOR_FORMAT_NOT_SUPPORTED: ( + "VECTOR type {vector_format} is not supported" + ), + ERR_VECTOR_VERSION_NOT_SUPPORTED: ( + "VECTOR version {version} is not supported" + ), + ERR_WALLET_FILE_MISSING: "wallet file {name} was not found", + ERR_WRONG_ARRAY_DEFINITION: ( + "expecting a list of two elements [type, numelems]" + ), + ERR_WRONG_DIRECT_PATH_DATA_TYPE: ( + "expecting a list or an object implementing the Apache Arrow " + "PyCapsule interface __arrow_c_stream__()" + ), + ERR_WRONG_EXECUTE_PARAMETERS_TYPE: ( + "expecting a dictionary, list or tuple, or keyword args" + ), + ERR_WRONG_EXECUTEMANY_PARAMETERS_TYPE: ( + '"parameters" argument should be a list of sequences or ' + "dictionaries, or an integer specifying the number of " + "times to execute the statement, or an object implementing the Apache " + "Arrow PyCapsule interface __arrow_c_stream__()" + ), + ERR_WRONG_NUMBER_OF_POSITIONAL_BINDS: ( + "{expected_num} positional bind values are required but " + "{actual_num} were provided" + ), + ERR_WRONG_OBJECT_TYPE: ( + 'found object of type "{actual_schema}.{actual_name}" when ' + 'expecting object of type "{expected_schema}.{expected_name}"' + ), + ERR_WRONG_REQUESTED_SCHEMA_LENGTH: ( + "requested schema has {num_schema_columns} columns defined but " + "{num_fetched_columns} are being fetched" + ), + ERR_WRONG_SCROLL_MODE: ( + "scroll mode must be relative, absolute, first or last" + ), + WRN_COMPILATION_ERROR: "creation succeeded with compilation errors", +} diff --git a/.venv/lib/python3.9/site-packages/oracledb/exceptions.py b/.venv/lib/python3.9/site-packages/oracledb/exceptions.py new file mode 100644 index 0000000..b262cac --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/exceptions.py @@ -0,0 +1,125 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 2023, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# exceptions.py +# +# Contains the exception classes mandated by the Python Database API. +# ----------------------------------------------------------------------------- + + +class Warning(Exception): + """ + Exception raised for warnings. + + Exception messages of this class will have the prefix DPY and an error + number in the range 9000 - 9999. + """ + + +class Error(Exception): + """ + Exception that is the base class of all other exceptions defined by + python-oracledb. + """ + + +class DatabaseError(Error): + """ + Exception raised for errors that are related to the database. It is a + subclass of Error. + + Exception messages of this class will have the prefix DPY and an error + number in the range 4000 - 4999. + """ + + +class DataError(DatabaseError): + """ + Exception raised for errors that are due to problems with the processed + data. It is a subclass of DatabaseError. + + Exception messages of this class are generated by the database and will + have a prefix such as ORA. + """ + + +class IntegrityError(DatabaseError): + """ + Exception raised when the relational integrity of the database is affected. + It is a subclass of DatabaseError. + + Exception messages of this class are generated by the database and will + have a prefix such as ORA. + """ + + +class InterfaceError(Error): + """ + Exception raised for errors that are related to the database interface + rather than the database itself. It is a subclass of Error. + + Exception messages of this class will have the prefix DPY and an error + number in the range 1000 - 1999. + """ + + +class InternalError(DatabaseError): + """ + Exception raised when the database encounters an internal error. It is a + subclass of DatabaseError. + + Exception messages of this class will have the prefix DPY and an error + number in the range 5000 - 5999. + """ + + +class NotSupportedError(DatabaseError): + """ + Exception raised when a method or database API was used which is not + supported by the database. It is a subclass of DatabaseError. + + Exception messages of this class will have the prefix DPY and an error + number in the range 3000 - 3999. + """ + + +class OperationalError(DatabaseError): + """ + Exception raised for errors that are related to the operation of the + database but are not necessarily under the control of the programmer. It is + a subclass of DatabaseError. + + Exception messages of this class will have the prefix DPY and an error + number in the range 6000 - 6999. + """ + + +class ProgrammingError(DatabaseError): + """ + Exception raised for programming errors. It is a subclass of DatabaseError. + + Exception messages of this class will have the prefix DPY and an error + number in the range 2000 - 2999. + """ diff --git a/.venv/lib/python3.9/site-packages/oracledb/fetch_info.py b/.venv/lib/python3.9/site-packages/oracledb/fetch_info.py new file mode 100644 index 0000000..a690027 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/fetch_info.py @@ -0,0 +1,310 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2023, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. 
You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# fetch_info.py +# +# Contains the FetchInfo class which stores metadata about columns that are +# being fetched. +# ----------------------------------------------------------------------------- + +from typing import Union + +import oracledb + +from . import constants +from .base import BaseMetaClass +from .base_impl import ( + DbType, + DB_TYPE_DATE, + DB_TYPE_TIMESTAMP, + DB_TYPE_TIMESTAMP_LTZ, + DB_TYPE_TIMESTAMP_TZ, + DB_TYPE_BINARY_FLOAT, + DB_TYPE_BINARY_DOUBLE, + DB_TYPE_BINARY_INTEGER, + DB_TYPE_NUMBER, + DB_TYPE_VECTOR, +) +from .dbobject import DbObjectType + + +class FetchInfo(metaclass=BaseMetaClass): + """ + Identifies metadata of columns that are being fetched. + """ + + def __eq__(self, other): + return tuple(self) == other + + def __getitem__(self, index): + """ + Return the parts mandated by the Python Database API. + """ + if index == 0 or index == -7: + return self.name + elif index == 1 or index == -6: + return self.type_code + elif index == 2 or index == -5: + return self.display_size + elif index == 3 or index == -4: + return self.internal_size + elif index == 4 or index == -3: + return self.precision + elif index == 5 or index == -2: + return self.scale + elif index == 6 or index == -1: + return self.null_ok + elif isinstance(index, slice): + return tuple(self).__getitem__(index) + raise IndexError("list index out of range") + + def __len__(self): + """ + Length mandated by the Python Database API. + """ + return 7 + + def __repr__(self): + return repr(tuple(self)) + + def __str__(self): + return str(tuple(self)) + + @classmethod + def _from_impl(cls, impl): + info = cls.__new__(cls) + info._impl = impl + info._type = None + return info + + @property + def annotations(self) -> Union[dict, None]: + """ + This read-only attribute returns a dictionary containing the + `annotations `__ associated with the + fetched column. If there are no annotations, the value *None* is + returned. Annotations require Oracle Database version 23, or later. If + using python-oracledb Thick mode, Oracle Client version 23 or later is + also required. + """ + return self._impl.annotations + + @property + def display_size(self) -> Union[int, None]: + """ + This read-only attribute returns the display size of the column. 
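
For reference, the exception classes in exceptions.py above follow the DB API 2.0 hierarchy, so callers can catch either the broad DatabaseError or a narrower subclass. A minimal usage sketch follows; the connection details, table, and bind value are hypothetical, and reading the detailed error object from exc.args is standard python-oracledb behaviour rather than something shown in this excerpt.

    import oracledb

    # hypothetical connection details
    conn = oracledb.connect(user="app", password="secret", dsn="dbhost/orclpdb1")
    with conn.cursor() as cursor:
        try:
            cursor.execute("insert into child_table values (:1)", [999])  # hypothetical table
        except oracledb.IntegrityError as exc:
            (err,) = exc.args                        # detailed error object
            print("constraint violated:", err.message)
        except oracledb.DatabaseError as exc:
            print("database error:", exc)
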
+ """ + if self._impl.max_size > 0: + return self._impl.max_size + dbtype = self._impl.dbtype + if ( + dbtype is DB_TYPE_DATE + or dbtype is DB_TYPE_TIMESTAMP + or dbtype is DB_TYPE_TIMESTAMP_LTZ + or dbtype is DB_TYPE_TIMESTAMP_TZ + ): + return 23 + elif ( + dbtype is DB_TYPE_BINARY_FLOAT + or dbtype is DB_TYPE_BINARY_DOUBLE + or dbtype is DB_TYPE_BINARY_INTEGER + or dbtype is DB_TYPE_NUMBER + ): + if self._impl.precision: + display_size = self._impl.precision + 1 + if self._impl.scale > 0: + display_size += self._impl.scale + 1 + else: + display_size = 127 + return display_size + + @property + def domain_name(self) -> Union[str, None]: + """ + This read-only attribute returns the name of the `data use case + domain + `__ associated with the fetched column. If + there is no data use case domain, the value *None* is returned. `Data + use case domains `__ require Oracle + Database version 23, or later. If using python-oracledb Thick mode, + Oracle Client version 23 or later is also required. + """ + return self._impl.domain_name + + @property + def domain_schema(self) -> Union[str, None]: + """ + This read-only attribute returns the schema of the `data use case + domain `__ associated with the fetched + column. If there is no data use case domain, the value *None* is + returned. `Data use case domains `__ + require Oracle Database version 23, or later. If using python-oracledb + Thick mode, Oracle Client version 23 or later is also required. + """ + return self._impl.domain_schema + + @property + def internal_size(self) -> Union[int, None]: + """ + This read-only attribute returns the internal size of the column as + mandated by the Python Database API. + """ + if self._impl.max_size > 0: + return self._impl.buffer_size + + @property + def is_json(self) -> bool: + """ + This read-only attribute returns whether the column is known to contain + JSON data. This will be *True* when the type code is + :data:`oracledb.DB_TYPE_JSON` as well as when an "IS JSON" constraint + is enabled on LOB and VARCHAR2 columns. + """ + return self._impl.is_json + + @property + def is_oson(self) -> bool: + """ + This read-only attribute returns whether the column is known to contain + binary encoded `OSON `__ data. This + will be *True* when an "IS JSON FORMAT OSON" check constraint is + enabled on BLOB columns. + """ + return self._impl.is_oson + + @property + def name(self) -> str: + """ + This read-only attribute returns the name of the column as mandated by + the Python Database API. + """ + return self._impl.name + + @property + def null_ok(self) -> bool: + """ + This read-only attribute returns whether nulls are allowed in the + column as mandated by the Python Database API. + """ + return self._impl.nulls_allowed + + @property + def precision(self) -> Union[int, None]: + """ + This read-only attribute returns the precision of the column as + mandated by the Python Database API. + """ + if self._impl.precision or self._impl.scale: + return self._impl.precision + + @property + def scale(self) -> Union[int, None]: + """ + This read-only attribute returns the scale of the column as mandated by + the Python Database API. + """ + if self._impl.precision or self._impl.scale: + return self._impl.scale + + @property + def type(self) -> Union[DbType, DbObjectType]: + """ + This read-only attribute returns the type of the column. This will be + an :ref:`Oracle Object Type ` if the column contains + Oracle objects; otherwise, it will be one of the + :ref:`database type constants ` defined at the module level. 
+ """ + if self._type is None: + if self._impl.objtype is not None: + self._type = DbObjectType._from_impl(self._impl.objtype) + else: + self._type = self._impl.dbtype + return self._type + + @property + def type_code(self) -> DbType: + """ + This read-only attribute returns the type of the column as mandated by + the Python Database API. The type will be one of the + :ref:`database type constants ` defined at the module level. + """ + return self._impl.dbtype + + @property + def vector_dimensions(self) -> Union[int, None]: + """ + This read-only attribute returns the number of dimensions required by + VECTOR columns. If the column is not a VECTOR column or allows for any + number of dimensions, the value returned is *None*. + """ + if self._impl.dbtype is DB_TYPE_VECTOR: + flags = self._impl.vector_flags + if not (flags & constants.VECTOR_META_FLAG_FLEXIBLE_DIM): + return self._impl.vector_dimensions + + @property + def vector_format(self) -> Union[oracledb.VectorFormat, None]: + """ + This read-only attribute returns the storage type used by VECTOR + columns. The value of this attribute can be: + + - :data:`oracledb.VECTOR_FORMAT_BINARY` which represents 8-bit unsigned + integers + - :data:`oracledb.VECTOR_FORMAT_INT8` which represents 8-bit signed + integers + - :data:`oracledb.VECTOR_FORMAT_FLOAT32` which represents 32-bit + floating-point numbers + - :data:`oracledb.VECTOR_FORMAT_FLOAT64` which represents 64-bit + floating-point numbers + + If the column is not a VECTOR column or allows for any type of storage, + the value returned is *None*. + """ + if ( + self._impl.dbtype is DB_TYPE_VECTOR + and self._impl.vector_format != 0 + ): + return oracledb.VectorFormat(self._impl.vector_format) + + @property + def vector_is_sparse(self) -> Union[bool, None]: + """ + This read-only attribute returns a boolean indicating if the vector is + sparse or not. + + If the column contains vectors that are SPARSE, the value returned is + *True*. If the column contains vectors that are DENSE, the value + returned is *False*. If the column is not a VECTOR column, the value + returned is *None*. + """ + if self._impl.dbtype is DB_TYPE_VECTOR: + flags = self._impl.vector_flags + return bool(flags & constants.VECTOR_META_FLAG_SPARSE_VECTOR) diff --git a/.venv/lib/python3.9/site-packages/oracledb/future.py b/.venv/lib/python3.9/site-packages/oracledb/future.py new file mode 100644 index 0000000..dff545d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/future.py @@ -0,0 +1,46 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# future.py +# +# Module for handling backwards incompatible changes. +# ----------------------------------------------------------------------------- + +FEATURES = [] + + +# future object used for managing backwards incompatible changes +class Future: + def __getattr__(self, name): + if name in FEATURES: + return super().__getattr__(name) + return None + + def __setattr__(self, name, value): + if name in FEATURES: + return super().__setattr__(name, value) + + +__future__ = Future() diff --git a/.venv/lib/python3.9/site-packages/oracledb/lob.py b/.venv/lib/python3.9/site-packages/oracledb/lob.py new file mode 100644 index 0000000..5581303 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/lob.py @@ -0,0 +1,318 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# lob.py +# +# Contains the LOB class for managing BLOB, CLOB, NCLOB and BFILE data. +# ----------------------------------------------------------------------------- + +from typing import Optional, Union + +from .base import BaseMetaClass +from .base_impl import DbType, DB_TYPE_BFILE, DB_TYPE_BLOB +from . import errors + + +class BaseLOB(metaclass=BaseMetaClass): + + def __del__(self): + self._impl.free_lob() + + def _check_is_bfile(self): + if self._impl.dbtype is not DB_TYPE_BFILE: + errors._raise_err(errors.ERR_OPERATION_ONLY_SUPPORTED_ON_BFILE) + + def _check_not_bfile(self): + if self._impl.dbtype is DB_TYPE_BFILE: + errors._raise_err(errors.ERR_OPERATION_NOT_SUPPORTED_ON_BFILE) + + def _check_value_to_write(self, value): + """ + Checks the value to write and returns the actual value to write. + Character LOBs must write strings but can accept UTF-8 encoded bytes + (which will be decoded to strings). Binary LOBs must write bytes but + can accept strings (which will be encoded in UTF-8). 
+ """ + if self.type is DB_TYPE_BLOB: + if isinstance(value, str): + return value.encode() + elif isinstance(value, bytes): + return value + else: + if isinstance(value, str): + return value + elif isinstance(value, bytes): + return value.decode() + raise TypeError("expecting string or bytes") + + @classmethod + def _from_impl(cls, impl): + if isinstance(impl, BaseLOB): + return impl + lob = cls.__new__(cls) + lob._impl = impl + return lob + + def getfilename(self) -> tuple: + """ + Returns a two-tuple consisting of the directory alias and file name for + a BFILE type LOB. + """ + self._check_is_bfile() + return self._impl.get_file_name() + + def setfilename(self, dir_alias: str, name: str) -> None: + """ + Sets the directory alias and name of a BFILE type LOB. + """ + self._check_is_bfile() + self._impl.set_file_name(dir_alias, name) + + @property + def type(self) -> DbType: + """ + This read-only attribute returns the type of the LOB as one of the + database type constants. + """ + return self._impl.dbtype + + +class LOB(BaseLOB): + + def __reduce__(self): + value = self.read() + return (type(value), (value,)) + + def __str__(self): + return self.read() + + def close(self) -> None: + """ + Closes the LOB. Call this when writing is completed so that the indexes + associated with the LOB can be updated -– but only if open() was called + first. + """ + self._impl.close() + + def fileexists(self) -> bool: + """ + Returns a boolean indicating if the file referenced by a BFILE type LOB + exists. + """ + self._check_is_bfile() + return self._impl.file_exists() + + def getchunksize(self) -> int: + """ + Returns the chunk size for the LOB. Reading and writing to the LOB in + chunks of multiples of this size will improve performance. + """ + self._check_not_bfile() + return self._impl.get_chunk_size() + + def isopen(self) -> bool: + """ + Returns a boolean indicating if the LOB has been opened using the + method open(). + """ + return self._impl.get_is_open() + + def open(self) -> None: + """ + Opens the LOB for writing. This will improve performance when writing + to the LOB in chunks and there are functional or extensible indexes + associated with the LOB. If this method is not called, each write will + perform an open internally followed by a close after the write has been + completed. + """ + self._impl.open() + + def read( + self, offset: int = 1, amount: Optional[int] = None + ) -> Union[str, bytes]: + """ + Returns a portion (or all) of the data in the LOB. Note that the amount + and offset are in bytes for BLOB and BFILE type LOBs and in UCS-2 code + points for CLOB and NCLOB type LOBs. UCS-2 code points are equivalent + to characters for all but supplemental characters. If supplemental + characters are in the LOB, the offset and amount will have to be chosen + carefully to avoid splitting a character. + """ + if amount is None: + amount = self._impl.get_max_amount() + if amount >= offset: + amount = amount - offset + 1 + else: + amount = 1 + elif amount <= 0: + errors._raise_err(errors.ERR_INVALID_LOB_AMOUNT) + if offset <= 0: + errors._raise_err(errors.ERR_INVALID_LOB_OFFSET) + return self._impl.read(offset, amount) + + def size(self) -> int: + """ + Returns the size of the data in the LOB. For BLOB and BFILE type LOBs, + this is the number of bytes. For CLOB and NCLOB type LOBs, this is the + number of UCS-2 code points. UCS-2 code points are equivalent to + characters for all but supplemental characters. 
+ """ + return self._impl.get_size() + + def trim( + self, new_size: int = 0, *, newSize: Optional[int] = None + ) -> None: + """ + Trims the LOB to the new size (the second parameter is deprecated and + should not be used). + """ + self._check_not_bfile() + if newSize is not None: + if new_size != 0: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="newSize", + new_name="new_size", + ) + new_size = newSize + self._impl.trim(new_size) + + def write(self, data: Union[str, bytes], offset: int = 1) -> None: + """ + Writes the data to the LOB at the given offset. The offset is in bytes + for BLOB type LOBs and in UCS-2 code points for CLOB and NCLOB type + LOBs. UCS-2 code points are equivalent to characters for all but + supplemental characters. If supplemental characters are in the LOB, the + offset will have to be chosen carefully to avoid splitting a character. + Note that if you want to make the LOB value smaller, you must use the + trim() function. + """ + self._check_not_bfile() + self._impl.write(self._check_value_to_write(data), offset) + + +class AsyncLOB(BaseLOB): + + async def close(self) -> None: + """ + Closes the LOB. Call this when writing is completed so that the indexes + associated with the LOB can be updated -– but only if open() was called + first. + """ + await self._impl.close() + + async def fileexists(self) -> bool: + """ + Returns a boolean indicating if the file referenced by a BFILE type LOB + exists. + """ + self._check_is_bfile() + return await self._impl.file_exists() + + async def getchunksize(self) -> int: + """ + Returns the chunk size for the LOB. Reading and writing to the LOB in + chunks of multiples of this size will improve performance. + """ + self._check_not_bfile() + return await self._impl.get_chunk_size() + + async def isopen(self) -> bool: + """ + Returns a boolean indicating if the LOB has been opened using the + method open(). + """ + return await self._impl.get_is_open() + + async def open(self) -> None: + """ + Opens the LOB for writing. This will improve performance when writing + to the LOB in chunks and there are functional or extensible indexes + associated with the LOB. If this method is not called, each write will + perform an open internally followed by a close after the write has been + completed. + """ + await self._impl.open() + + async def read( + self, offset: int = 1, amount: Optional[int] = None + ) -> Union[str, bytes]: + """ + Returns a portion (or all) of the data in the LOB. Note that the amount + and offset are in bytes for BLOB and BFILE type LOBs and in UCS-2 code + points for CLOB and NCLOB type LOBs. UCS-2 code points are equivalent + to characters for all but supplemental characters. If supplemental + characters are in the LOB, the offset and amount will have to be chosen + carefully to avoid splitting a character. + """ + if amount is None: + amount = self._impl.get_max_amount() + if amount >= offset: + amount = amount - offset + 1 + else: + amount = 1 + if offset <= 0: + errors._raise_err(errors.ERR_INVALID_LOB_OFFSET) + return await self._impl.read(offset, amount) + + async def size(self) -> int: + """ + Returns the size of the data in the LOB. For BLOB and BFILE type LOBs + this is the number of bytes. For CLOB and NCLOB type LOBs this is the + number of UCS-2 code points. UCS-2 code points are equivalent to + characters for all but supplemental characters. 
+ """ + return await self._impl.get_size() + + async def trim( + self, new_size: int = 0, *, newSize: Optional[int] = None + ) -> None: + """ + Trims the LOB to the new size (the second parameter is deprecated and + should not be used). + """ + self._check_not_bfile() + if newSize is not None: + if new_size != 0: + errors._raise_err( + errors.ERR_DUPLICATED_PARAMETER, + deprecated_name="newSize", + new_name="new_size", + ) + new_size = newSize + await self._impl.trim(new_size) + + async def write(self, data: Union[str, bytes], offset: int = 1) -> None: + """ + Writes the data to the LOB at the given offset. The offset is in bytes + for BLOB type LOBs and in UCS-2 code points for CLOB and NCLOB type + LOBs. UCS-2 code points are equivalent to characters for all but + supplemental characters. If supplemental characters are in the LOB, the + offset will have to be chosen carefully to avoid splitting a character. + Note that if you want to make the LOB value smaller, you must use the + trim() function. + """ + self._check_not_bfile() + await self._impl.write(self._check_value_to_write(data), offset) diff --git a/.venv/lib/python3.9/site-packages/oracledb/pipeline.py b/.venv/lib/python3.9/site-packages/oracledb/pipeline.py new file mode 100644 index 0000000..bb2d056 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/pipeline.py @@ -0,0 +1,491 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2024, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# pipeline.py +# +# Contains the Pipeline class used for executing multiple operations. +# ----------------------------------------------------------------------------- + +from typing import Any, Callable, Optional, Union + +from . import utils +from .base import BaseMetaClass +from .base_impl import PipelineImpl, PipelineOpImpl, PipelineOpResultImpl +from .defaults import defaults +from .enums import PipelineOpType +from .errors import _Error +from .fetch_info import FetchInfo + + +class PipelineOp(metaclass=BaseMetaClass): + + def __repr__(self): + cls_name = self.__class__._public_name + return f"<{cls_name} of type {self.op_type.name}>" + + def _create_result(self): + """ + Internal method used for creating a result object that is returned when + running a pipeline. 
+ """ + impl = PipelineOpResultImpl(self._impl) + result = PipelineOpResult.__new__(PipelineOpResult) + result._operation = self + result._impl = impl + return result + + @property + def arraysize(self) -> int: + """ + This read-only attribute returns the array size that will be used when + fetching query rows with :meth:`Pipeline.add_fetchall()`. For all other + operations, the value returned is *0*. + """ + return self._impl.arraysize + + @property + def fetch_decimals(self) -> bool: + """ + Returns whether or not to fetch columns of type ``NUMBER`` as + ``decimal.Decimal`` values for a query. + """ + return self._impl.fetch_decimals + + @property + def fetch_lobs(self) -> bool: + """ + Returns whether or not to fetch LOB locators for a query. + """ + return self._impl.fetch_lobs + + @property + def keyword_parameters(self) -> Any: + """ + This read-only attribute returns the keyword parameters to the stored + procedure or function being called by the operation, if applicable. + """ + return self._impl.keyword_parameters + + @property + def name(self) -> Union[str, None]: + """ + This read-only attribute returns the name of the stored procedure or + function being called by the operation, if applicable. + """ + return self._impl.name + + @property + def num_rows(self) -> int: + """ + This read-only attribute returns the number of rows to fetch when + performing a query of a specific number of rows. For all other + operations, the value returned is *0*. + """ + return self._impl.num_rows + + @property + def op_type(self) -> PipelineOpType: + """ + This read-only attribute returns the type of operation that is taking + place. + """ + return PipelineOpType(self._impl.op_type) + + @property + def parameters(self) -> Any: + """ + This read-only attribute returns the parameters to the stored procedure + or function or the parameters bound to the statement being executed by + the operation, if applicable. + """ + return self._impl.parameters + + @property + def return_type(self) -> Any: + """ + This read-only attribute returns the return type of the stored function + being called by the operation, if applicable. + """ + return self._impl.return_type + + @property + def rowfactory(self) -> Union[Callable, None]: + """ + This read-only attribute returns the row factory callable function to + be used in a query executed by the operation, if applicable. + """ + return self._impl.rowfactory + + @property + def statement(self) -> Union[str, None]: + """ + This read-only attribute returns the statement being executed by the + operation, if applicable. + """ + return self._impl.statement + + +class PipelineOpResult(metaclass=BaseMetaClass): + + def __repr__(self): + cls_name = self.__class__._public_name + return ( + f"<{cls_name} for operation of type {self.operation.op_type.name}>" + ) + + @property + def columns(self) -> Union[list[FetchInfo], None]: + """ + This read-only attribute is a list of FetchInfo objects. This + attribute will be *None* for operations that do not return rows. + """ + if self._impl.fetch_metadata is not None: + return [FetchInfo._from_impl(i) for i in self._impl.fetch_metadata] + + @property + def error(self) -> Union[_Error, None]: + """ + This read-only attribute returns the error that occurred when running + this operation. If no error occurred, then the value *None* is + returned. + """ + return self._impl.error + + @property + def operation(self) -> PipelineOp: + """ + This read-only attribute returns the PipelineOp operation object that + generated the result. 
+ """ + return self._operation + + @property + def return_value(self) -> Any: + """ + This read-only attribute returns the return value of the called PL/SQL + function, if a function was called for the operation. + """ + return self._impl.return_value + + @property + def rows(self) -> Union[list, None]: + """ + This read-only attribute returns the rows that were fetched by the + operation, if a query was executed. + """ + return self._impl.rows + + @property + def warning(self) -> Union[_Error, None]: + """ + This read-only attribute returns any warning that was encountered when + running this operation. If no warning was encountered, then the value + *None* is returned. + """ + return self._impl.warning + + +class Pipeline(metaclass=BaseMetaClass): + + def __repr__(self): + cls_name = self.__class__._public_name + return f"<{cls_name} with {len(self._impl.operations)} operations>" + + def _add_op(self, op_impl): + """ + Internal method for adding an PipelineOpImpl instance to the list of + operations, creating an associated PipelineOp instance to correspond to + it. + """ + self._impl.operations.append(op_impl) + op = PipelineOp.__new__(PipelineOp) + op._impl = op_impl + self._operations.append(op) + return op + + def add_callfunc( + self, + name: str, + return_type: Any, + parameters: Optional[Union[list, tuple]] = None, + keyword_parameters: Optional[dict] = None, + ) -> PipelineOp: + """ + Adds an operation to the pipeline that calls a stored PL/SQL function + with the given parameters and return type. The created PipelineOp + object is also returned from this function. + + When the Pipeline is executed, the PipelineOpResult object that is + returned for this operation will have the + :attr:`~PipelineOpResult.return_value` attribute populated with the + return value of the PL/SQL function if the call completes + successfully. + """ + utils.verify_stored_proc_args(parameters, keyword_parameters) + op_impl = PipelineOpImpl( + op_type=PipelineOpType.CALL_FUNC, + name=name, + return_type=return_type, + parameters=parameters, + keyword_parameters=keyword_parameters, + ) + return self._add_op(op_impl) + + def add_callproc( + self, + name: str, + parameters: Optional[Union[list, tuple]] = None, + keyword_parameters: Optional[dict] = None, + ) -> PipelineOp: + """ + Adds an operation that calls a stored procedure with the given + parameters. The created PipelineOp object is also returned from + this function. + """ + utils.verify_stored_proc_args(parameters, keyword_parameters) + op_impl = PipelineOpImpl( + op_type=PipelineOpType.CALL_PROC, + name=name, + parameters=parameters, + keyword_parameters=keyword_parameters, + ) + return self._add_op(op_impl) + + def add_commit(self) -> PipelineOp: + """ + Adds an operation that performs a commit. + """ + op_impl = PipelineOpImpl(op_type=PipelineOpType.COMMIT) + return self._add_op(op_impl) + + def add_execute( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + ) -> PipelineOp: + """ + Adds an operation that executes a statement with the given parameters. + The created PipelineOp object is also returned from this function. + + Do not use this for queries that return rows. Instead use + :meth:`Pipeline.add_fetchall()`, :meth:`Pipeline.add_fetchmany()`, or + :meth:`Pipeline.add_fetchone()`. 
+ """ + op_impl = PipelineOpImpl( + op_type=PipelineOpType.EXECUTE, + statement=statement, + parameters=parameters, + ) + return self._add_op(op_impl) + + def add_executemany( + self, + statement: str, + parameters: Union[list, int], + ) -> PipelineOp: + """ + Adds an operation that executes a SQL statement once using all bind + value mappings or sequences found in the sequence parameters. This can + be used to insert, update, or delete multiple rows in a table. It can + also invoke a PL/SQL procedure multiple times. + + The created PipelineOp object is also returned from this function. + + The ``parameters`` parameter can be a list of tuples, where each tuple + item maps to one bind variable placeholder in ``statement``. It can + also be a list of dictionaries, where the keys match the bind variable + placeholder names in ``statement``. If there are no bind values, or + values have previously been bound, the ``parameters`` value can be an + integer specifying the number of iterations. + """ + op_impl = PipelineOpImpl( + op_type=PipelineOpType.EXECUTE_MANY, + statement=statement, + parameters=parameters, + ) + return self._add_op(op_impl) + + def add_fetchall( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + arraysize: Optional[int] = None, + rowfactory: Optional[Callable] = None, + fetch_lobs: Optional[bool] = None, + fetch_decimals: Optional[bool] = None, + ) -> PipelineOp: + """ + Adds an operation that executes a query and returns all of the rows + from the result set. The created PipelineOp object is also returned + from this function. + + When the Pipeline is executed, the PipelineOpResult object that is + returned for this operation will have the + :attr:`~PipelineOpResult.rows` attribute populated with the list of + rows returned by the query. + + The default value for ``arraysize`` is + :attr:`oracledb.defaults.arraysize `. + + Internally, this operation's :attr:`Cursor.prefetchrows` size is set + to the value of the explicit or default ``arraysize`` parameter value. + + The ``fetch_lobs`` parameter specifies whether to return LOB locators + or ``str``/``bytes`` values when fetching LOB columns. The default + value is :data:`oracledb.defaults.fetch_lobs `. + + The ``fetch_decimals`` parameter specifies whether to return + ``decimal.Decimal`` values when fetching columns of type ``NUMBER``. + The default value is + :data:`oracledb.defaults.fetch_decimals `. + """ + if arraysize is None: + arraysize = defaults.arraysize + op_impl = PipelineOpImpl( + op_type=PipelineOpType.FETCH_ALL, + statement=statement, + parameters=parameters, + arraysize=arraysize, + rowfactory=rowfactory, + fetch_lobs=fetch_lobs, + fetch_decimals=fetch_decimals, + ) + return self._add_op(op_impl) + + def add_fetchmany( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + num_rows: Optional[int] = None, + rowfactory: Optional[Callable] = None, + fetch_lobs: Optional[bool] = None, + fetch_decimals: Optional[bool] = None, + ) -> PipelineOp: + """ + Adds an operation that executes a query and returns up to the specified + number of rows from the result set. The created PipelineOp object is + also returned from this function. + + When the Pipeline is executed, the PipelineOpResult object that is + returned for this operation will have the + :attr:`~PipelineOpResult.rows` attribute populated with the list of + rows returned by the query. + + The default value for ``num_rows`` is the value of + :attr:`oracledb.defaults.arraysize `. 
+ + Internally, this operation's :attr:`Cursor.prefetchrows` size is set to + the value of the explicit or default ``num_rows`` parameter, allowing + all rows to be fetched in one round-trip. + + Since only one fetch is performed for a query operation, consider + adding a ``FETCH NEXT`` clause to the statement to prevent the + database processing rows that will never be fetched. + + The ``fetch_lobs`` parameter specifies whether to return LOB locators + or ``str``/``bytes`` values when fetching LOB columns. The default + value is :data:`oracledb.defaults.fetch_lobs `. + + The ``fetch_decimals`` parameter specifies whether to return + ``decimal.Decimal`` values when fetching columns of type ``NUMBER``. + The default value is + :data:`oracledb.defaults.fetch_decimals `. + """ + if num_rows is None: + num_rows = defaults.arraysize + op_impl = PipelineOpImpl( + op_type=PipelineOpType.FETCH_MANY, + statement=statement, + parameters=parameters, + num_rows=num_rows, + rowfactory=rowfactory, + fetch_lobs=fetch_lobs, + fetch_decimals=fetch_decimals, + ) + return self._add_op(op_impl) + + def add_fetchone( + self, + statement: str, + parameters: Optional[Union[list, tuple, dict]] = None, + rowfactory: Optional[Callable] = None, + fetch_lobs: Optional[bool] = None, + fetch_decimals: Optional[bool] = None, + ) -> PipelineOp: + """ + Adds an operation that executes a query and returns the first row of + the result set if one exists (or *None*, if no rows exist). The + created PipelineOp object is also returned from this function. + + When the Pipeline is executed, the PipelineOpResult object that is + returned for this operation will have the + :attr:`~PipelineOpResult.rows` attribute populated with this row if the + query is performed successfully. + + Internally, this operation's :attr:`Cursor.prefetchrows` and + :attr:`Cursor.arraysize` sizes will be set to *1*. + + Since only one fetch is performed for a query operation, consider + adding a ``WHERE`` condition or using a ``FETCH NEXT`` clause in the + statement to prevent the database processing rows that will never be + fetched. + + The ``fetch_lobs`` parameter specifies whether to return LOB locators + or ``str``/``bytes`` values when fetching LOB columns. The default + value is :data:`oracledb.defaults.fetch_lobs `. + + The ``fetch_decimals`` parameter specifies whether to return + ``decimal.Decimal`` values when fetching columns of type ``NUMBER``. + The default value is + :data:`oracledb.defaults.fetch_decimals `. + """ + op_impl = PipelineOpImpl( + op_type=PipelineOpType.FETCH_ONE, + statement=statement, + parameters=parameters, + rowfactory=rowfactory, + fetch_lobs=fetch_lobs, + fetch_decimals=fetch_decimals, + ) + return self._add_op(op_impl) + + @property + def operations(self) -> list[PipelineOp]: + """ + This read-only attribute returns the list of operations associated with + the pipeline. + """ + return self._operations + + +def create_pipeline() -> Pipeline: + """ + Creates a pipeline object which can be used to process a set of operations + against a database. 
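
Pulling the pipeline.py pieces together: create_pipeline() builds the container, the add_* methods queue operations, and the pipeline is then run against a connection. Executing it uses the asyncio connection's run_pipeline() method, which does not appear in this excerpt, so that call is an assumption; statements and names are hypothetical:

    import asyncio
    import oracledb

    async def main():
        conn = await oracledb.connect_async(user="app", password="secret", dsn="dbhost/orclpdb1")
        pipeline = oracledb.create_pipeline()
        pipeline.add_execute("insert into audit_log (msg) values (:1)", ["run started"])
        pipeline.add_executemany(
            "insert into points (id, val) values (:1, :2)",
            [(1, 10), (2, 20), (3, 30)],
        )
        pipeline.add_fetchall("select id, val from points", arraysize=100)
        pipeline.add_callfunc("dbms_random.value", float)
        pipeline.add_commit()
        results = await conn.run_pipeline(pipeline)   # assumed API, not shown above
        for result in results:
            if result.error is not None:
                print("operation failed:", result.error)
            elif result.rows is not None:
                print("fetched:", result.rows)
            elif result.return_value is not None:
                print("function returned:", result.return_value)
        await conn.close()

    asyncio.run(main())
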
+ """ + pipeline = Pipeline.__new__(Pipeline) + pipeline._impl = PipelineImpl() + pipeline._operations = [] + return pipeline diff --git a/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/azure_config_provider.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/azure_config_provider.cpython-39.pyc new file mode 100644 index 0000000..97ef441 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/azure_config_provider.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/azure_tokens.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/azure_tokens.cpython-39.pyc new file mode 100644 index 0000000..8b3f146 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/azure_tokens.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/oci_config_provider.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/oci_config_provider.cpython-39.pyc new file mode 100644 index 0000000..3c6dc8f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/oci_config_provider.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/oci_tokens.cpython-39.pyc b/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/oci_tokens.cpython-39.pyc new file mode 100644 index 0000000..37662db Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/plugins/__pycache__/oci_tokens.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/plugins/azure_config_provider.py b/.venv/lib/python3.9/site-packages/oracledb/plugins/azure_config_provider.py new file mode 100644 index 0000000..52719f8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/plugins/azure_config_provider.py @@ -0,0 +1,274 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2024, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# azure_config_provider.py +# +# Python file contains the hook method config_azure_hook() that fetches config +# store from Azure App Configuration. 
+# ----------------------------------------------------------------------------- + +import json +import re + +import oracledb + +from urllib.parse import urlparse, parse_qs +from azure.appconfiguration import AzureAppConfigurationClient +from azure.keyvault.secrets import SecretClient +from azure.core.exceptions import ResourceNotFoundError +from azure.identity import ( + ClientSecretCredential, + CertificateCredential, + ManagedIdentityCredential, + ChainedTokenCredential, + EnvironmentCredential, +) + + +def _get_authentication_method(parameters): + auth_method = parameters.get("authentication", parameters.get("method")) + if auth_method is not None: + auth_method = auth_method.upper() + if auth_method == "AZURE_DEFAULT": + auth_method = None + return auth_method + + +def _get_credential(parameters): + """ + Returns the appropriate credential given the input supplied by the original + connect string. + """ + + tokens = [] + auth_method = _get_authentication_method(parameters) + + if auth_method is None or auth_method == "AZURE_SERVICE_PRINCIPAL": + if "azure_client_secret" in parameters: + tokens.append( + ClientSecretCredential( + _get_required_parameter(parameters, "azure_tenant_id"), + _get_required_parameter(parameters, "azure_client_id"), + _get_required_parameter(parameters, "azure_client_secret"), + ) + ) + elif "azure_client_certificate_path" in parameters: + tokens.append( + CertificateCredential( + _get_required_parameter(parameters, "azure_tenant_id"), + _get_required_parameter(parameters, "azure_client_id"), + _get_required_parameter( + parameters, "azure_client_certificate_path" + ), + ) + ) + if auth_method is None or auth_method == "AZURE_MANAGED_IDENTITY": + client_id = parameters.get("azure_managed_identity_client_id") + if client_id is not None: + tokens.append(ManagedIdentityCredential(client_id=client_id)) + + if len(tokens) == 0: + message = ( + "Authentication options were not available in Connection String" + ) + raise Exception(message) + elif len(tokens) == 1: + return tokens[0] + tokens.append(EnvironmentCredential()) + return ChainedTokenCredential(*tokens) + + +def _get_password(pwd_string, parameters): + try: + pwd = json.loads(pwd_string) + except json.JSONDecodeError: + message = ( + "Password is expected to be JSON" + " containing Azure Vault details." 
+ ) + raise Exception(message) + + pwd["value"] = pwd.pop("uri") + pwd["type"] = "azurevault" + + # make authentication section + pwd["authentication"] = authentication = {} + + authentication["method"] = auth_method = _get_authentication_method( + parameters + ) + + if auth_method is None or auth_method == "AZURE_SERVICE_PRINCIPAL": + if "azure_client_secret" in parameters: + authentication["azure_tenant_id"] = _get_required_parameter( + parameters, "azure_tenant_id" + ) + authentication["azure_client_id"] = _get_required_parameter( + parameters, "azure_client_id" + ) + authentication["azure_client_secret"] = _get_required_parameter( + parameters, "azure_client_secret" + ) + + elif "azure_client_certificate_path" in parameters: + authentication["azure_tenant_id"] = ( + _get_required_parameter(parameters, "azure_tenant_id"), + ) + authentication["azure_client_id"] = ( + _get_required_parameter(parameters, "azure_client_id"), + ) + authentication["azure_client_certificate_path"] = ( + _get_required_parameter( + parameters, "azure_client_certificate_path" + ) + ) + + if auth_method is None or auth_method == "AZURE_MANAGED_IDENTITY": + authentication["azure_managed_identity_client_id"] = parameters.get( + "azure_managed_identity_client_id" + ) + return pwd + + +def _get_required_parameter(parameters, name, location="connection string"): + try: + return parameters[name] + except KeyError: + message = f'Parameter named "{name}" is missing from {location}' + raise Exception(message) from None + + +def _get_setting(client, key, sub_key, label, required=True): + """ + Returns the configuration setting given the client, key and label. + """ + try: + if key.endswith("/"): + actual_key = f"{key}{sub_key}" + else: + actual_key = f"{key}/{sub_key}" + obj = client.get_configuration_setting(key=actual_key, label=label) + except ResourceNotFoundError: + if required: + message = f"Missing required configuration key: {actual_key}" + raise Exception(message) + return None + return obj.value + + +def _parse_parameters(protocol_arg: str) -> dict: + """ + Parse the parameters from the protocol argument string. + """ + pos = protocol_arg.find("?") + parsed_url = urlparse(protocol_arg[pos + 1 :]) + parsed_values = parse_qs(parsed_url.path) + parameters = { + key.lower(): value[0] for key, value in parsed_values.items() + } + config_name = protocol_arg[:pos].rstrip("/") + if not config_name.endswith(".azconfig.io"): + config_name += ".azconfig.io" + parameters["appconfigname"] = config_name + return parameters + + +def password_type_azure_vault_hook(args): + uri = _get_required_parameter(args, "value", '"password" key section') + credential = args.get("credential") + + if credential is None: + # if credential not present, this might be coming + # from oci config provider, so create credential + # for azure key vault. + auth = args.get("authentication") + if auth is None: + raise Exception( + "Azure Vault authentication details were not provided." + ) + credential = _get_credential(auth) + + pattern = re.compile( + r"(?Phttps://[A-Za-z0-9._-]+)/" + r"secrets/(?P[A-Za-z][A-Za-z0-9-]*)$" + ) + match = pattern.match(uri) + if match is None: + raise Exception("Invalid Azure Vault details") + vault_url = match.group("vault_url") + secret_key = match.group("secretKey") + secret_client = SecretClient(vault_url, credential) + return secret_client.get_secret(secret_key).value + + +def _process_config(parameters, connect_params): + """ + Processes the configuration stored in the Azure App configuration store. 
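
In practice the Azure provider is used by importing the plugin, which registers the config-azure protocol, and then pointing the DSN at the App Configuration store. The DSN shape below is inferred from _parse_parameters() and _get_credential() above; every identifier is a placeholder:

    import oracledb
    import oracledb.plugins.azure_config_provider   # registers "config-azure"

    dsn = (
        "config-azure://myappconfig"                 # ".azconfig.io" is appended if missing
        "?key=myapp/production&label=current"
        "&azure_client_id=<client-id>"
        "&azure_tenant_id=<tenant-id>"
        "&azure_client_secret=<secret>"
    )
    conn = oracledb.connect(dsn=dsn)
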
+ """ + + credential = _get_credential(parameters) + client = AzureAppConfigurationClient( + "https://" + _get_required_parameter(parameters, "appconfigname"), + credential, + ) + key = _get_required_parameter(parameters, "key") + label = parameters.get("label") + + # get the common parameters + config = {} + config["connect_descriptor"] = _get_setting( + client, key, "connect_descriptor", label + ) + config["user"] = _get_setting(client, key, "user", label, required=False) + pwd = _get_setting(client, key, "password", label, required=False) + if pwd is not None: + config["password"] = _get_password(pwd, parameters) + + config["config_time_to_live"] = _get_setting( + client, key, "config_time_to_live", label, required=False + ) + config["config_time_to_live_grace_period"] = _get_setting( + client, key, "config_time_to_live_grace_period", label, required=False + ) + + # get the python-oracledb specific parameters + settings = _get_setting(client, key, "pyo", label, required=False) + if settings is not None: + config["pyo"] = json.loads(settings) + + # set the configuration + connect_params.set_from_config(config) + + +def config_azure_hook(protocol, protocol_arg, connect_params): + """ + Hook for handling parameters stored in an Azure configuration store. + """ + parameters = _parse_parameters(protocol_arg) + _process_config(parameters, connect_params) + + +oracledb.register_password_type("azurevault", password_type_azure_vault_hook) +oracledb.register_protocol("config-azure", config_azure_hook) diff --git a/.venv/lib/python3.9/site-packages/oracledb/plugins/azure_tokens.py b/.venv/lib/python3.9/site-packages/oracledb/plugins/azure_tokens.py new file mode 100644 index 0000000..df0a3ea --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/plugins/azure_tokens.py @@ -0,0 +1,81 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2024, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# azure_tokens.py +# +# Methods that generates an OAuth2 access token using the MSAL SDK +# ----------------------------------------------------------------------------- + +import msal +import oracledb + + +def generate_token(token_auth_config, refresh=False): + """ + Generates an Azure access token based on provided credentials. 
+ """ + user_auth_type = token_auth_config.get("auth_type") or "" + auth_type = user_auth_type.lower() + if auth_type == "azureserviceprincipal": + return _service_principal_credentials(token_auth_config) + else: + raise ValueError( + f"Unrecognized auth_type authentication method: {user_auth_type}" + ) + + +def _service_principal_credentials(token_auth_config): + """ + Returns the access token for authentication as a service principal. + """ + msal_config = { + "authority": token_auth_config["authority"], + "client_id": token_auth_config["client_id"], + "client_credential": token_auth_config["client_credential"], + } + # Initialize the Confidential Client Application + cca = msal.ConfidentialClientApplication(**msal_config) + auth_response = cca.acquire_token_for_client( + scopes=[token_auth_config["scopes"]] + ) + + if "access_token" in auth_response: + return auth_response["access_token"] + + +def azure_token_hook(params: oracledb.ConnectParams): + """ + Azure-specific hook for generating a token. + """ + if params.extra_auth_params is not None: + + def token_callback(refresh): + return generate_token(params.extra_auth_params, refresh) + + params.set(access_token=token_callback) + + +# Register the token hook for Azure +oracledb.register_params_hook(azure_token_hook) diff --git a/.venv/lib/python3.9/site-packages/oracledb/plugins/oci_config_provider.py b/.venv/lib/python3.9/site-packages/oracledb/plugins/oci_config_provider.py new file mode 100644 index 0000000..7abf3aa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/plugins/oci_config_provider.py @@ -0,0 +1,253 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2024, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# oci_config_provider.py +# +# Python file contains the hook method config_oci_hook() that fetches config +# store from OCI Object Storage. +# ----------------------------------------------------------------------------- + +import base64 +import json +import oci +import oracledb +import re + +from urllib.parse import urlparse, parse_qs + +oci_from_file = oci.config.from_file +oci_client_error = oci.exceptions.ClientError +oci_object_storage_client = oci.object_storage.ObjectStorageClient +oci_secrets_client = oci.secrets.SecretsClient + + +""" +Pattern to parse OCI Object Connect String +""" +cloud_net_naming_pattern_oci = re.compile( + r"(?P[^/]+)/n/(?P[^/]+)/b/(?P[^/]+)/o/(?P[^/]+)(/c/(?P[^/]+))?" 
+) + + +def _get_config(parameters, connect_params): + config = {} + + credential, signer = _get_credential(parameters) + auth_method = parameters.get("authentication") + if auth_method is not None: + auth_method = auth_method.upper() + + if auth_method is None or auth_method == "OCI_DEFAULT": + client_oci = oci_object_storage_client(credential) + elif ( + auth_method == "OCI_INSTANCE_PRINCIPAL" + or auth_method == "OCI_RESOURCE_PRINCIPAL" + ): + client_oci = oci_object_storage_client( + config=credential, signer=signer + ) + get_object_request = { + "object_name": _get_required_parameter(parameters, "filename"), + "bucket_name": _get_required_parameter(parameters, "bucketname"), + "namespace_name": _get_required_parameter(parameters, "namespace"), + } + + get_object_response = client_oci.get_object(**get_object_request) + resp = _stream_to_string(get_object_response.data) + settings = json.loads(resp) + user_alias = parameters.get("alias") + if user_alias: + settings = settings[user_alias] + + # Connect Descriptor + config["connect_descriptor"] = _get_required_parameter( + settings, "connect_descriptor" + ) + + # user and password + if connect_params.user is None: + config["user"] = settings.get("user") + if "password" in settings: + config["password"] = pwd = settings["password"] + if pwd["type"] == "ocivault": + authentication = pwd.setdefault("authentication", {}) + authentication.setdefault("method", auth_method) + authentication["credential"] = credential + + # config cache settings + config["config_time_to_live"] = settings.get("config_time_to_live") + config["config_time_to_live_grace_period"] = settings.get( + "config_time_to_live_grace_period" + ) + + # pyo parameters settings + config["pyo"] = settings.get("pyo", None) + + # set the configuration + connect_params.set_from_config(config) + + +def _get_credential(parameters): + """ + Returns the appropriate credential given the input supplied by the original + connect string. + """ + auth_method = parameters.get("authentication", parameters.get("method")) + + if auth_method is not None: + auth_method = auth_method.upper() + + # if region is not in connection string, retrieve from object server name. + region = parameters.get( + "oci_region", _retrieve_region(parameters.get("objservername")) + ) + + try: + if auth_method is None or auth_method == "OCI_DEFAULT": + # Default Authentication + # default path ~/.oci/config + return oci_from_file(), None + except oci.exceptions.ClientError: + # try to create config with connection string parameters. 
+ if "oci_tenancy" in parameters and "oci_user" in parameters: + with open(parameters["oci_key_file"], "r") as file_content: + public_key = file_content.read() + provider = dict( + tenancy=parameters["oci_tenancy"], + user=parameters["oci_user"], + fingerprint=parameters["oci_fingerprint"], + key_file=parameters["oci_key_file"], + private_key_content=public_key, + region=region, + ) + return provider, None + + if auth_method == "OCI_INSTANCE_PRINCIPAL": + signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner() + return ( + dict(region=region), + signer, + ) + + elif auth_method == "OCI_RESOURCE_PRINCIPAL": + signer = oci.auth.signers.get_resource_principals_signer() + return {}, signer + else: + msg = "Authentication options were not available in Connection String" + raise Exception(msg) + + +def _get_required_parameter(parameters, name, location="connection string"): + try: + return parameters[name] + except KeyError: + message = f'Parameter named "{name}" is missing from {location}' + raise Exception(message) from None + + +def _parse_parameters(protocol_arg: str) -> dict: + """ + Parse the parameters from the protocol argument string. + """ + pos = protocol_arg.find("?") + parsed_url = urlparse(protocol_arg[pos + 1 :]) + parsed_values = parse_qs(parsed_url.path) + parameters = { + key.lower(): value[0] for key, value in parsed_values.items() + } + + match = cloud_net_naming_pattern_oci.match(protocol_arg[:pos]) + if match: + parameters["objservername"] = match.group("objservername") + parameters["namespace"] = match.group("namespace") + parameters["bucketname"] = match.group("bucketname") + parameters["filename"] = match.group("filename") + if match.group("alias"): + parameters["alias"] = match.group("alias") + return parameters + + +def password_type_oci_vault_hook(args): + secret_id = _get_required_parameter( + args, "value", '"password" key section' + ) + authentication = args.get("authentication") + if authentication is None: + raise Exception( + "OCI Key Vault authentication details were not provided." + ) + + # if credentials are not present, create credentials with given + # authentication details. + credential = authentication.get("credential") + if credential is None: + credential, signer = _get_credential(authentication) + + auth_method = authentication.get("method") + if auth_method is not None: + auth_method = auth_method.upper() + if auth_method is None or auth_method == "OCI_DEFAULT": + secret_client_oci = oci_secrets_client(credential) + elif auth_method == "OCI_INSTANCE_PRINCIPAL": + signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner() + secret_client_oci = oci_secrets_client( + config=credential, signer=signer + ) + elif auth_method == "OCI_RESOURCE_PRINCIPAL": + signer = oci.auth.signers.get_resource_principals_signer() + secret_client_oci = oci_secrets_client( + config=credential, signer=signer + ) + + get_secret_bundle_response = secret_client_oci.get_secret_bundle( + secret_id=secret_id + ) + # decoding the vault content + b64content = get_secret_bundle_response.data.secret_bundle_content.content + return base64.b64decode(b64content).decode() + + +def _retrieve_region(objservername): + if objservername is not None: + arr = objservername.split(".") + return arr[1].lower().replace("_", "-") + + +def _stream_to_string(stream): + return b"".join(stream).decode() + + +def config_oci_hook( + protocol: str, protocol_arg: str, connect_params: oracledb.ConnectParams +): + """ + Hook for handling parameters stored in an OCI Object store. 
+ """ + parameters = _parse_parameters(protocol_arg) + _get_config(parameters, connect_params) + + +oracledb.register_password_type("ocivault", password_type_oci_vault_hook) +oracledb.register_protocol("config-ociobject", config_oci_hook) diff --git a/.venv/lib/python3.9/site-packages/oracledb/plugins/oci_tokens.py b/.venv/lib/python3.9/site-packages/oracledb/plugins/oci_tokens.py new file mode 100644 index 0000000..50723de --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/plugins/oci_tokens.py @@ -0,0 +1,169 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2024, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# oci_tokens.py +# +# Methods that generates an OCI access token using the OCI SDK +# ----------------------------------------------------------------------------- + +import oci +import oracledb +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives import serialization + + +def generate_token(token_auth_config, refresh=False): + """ + Generates an OCI access token based on provided credentials. + """ + user_auth_type = token_auth_config.get("auth_type") or "" + auth_type = user_auth_type.lower() + if auth_type == "configfileauthentication": + return _config_file_based_authentication(token_auth_config) + elif auth_type == "simpleauthentication": + return _simple_authentication(token_auth_config) + elif auth_type == "instanceprincipal": + return _instance_principal_authentication(token_auth_config) + else: + raise ValueError( + f"Unrecognized auth_type authentication method {user_auth_type}" + ) + + +def _get_key_pair(): + """ + Generates a public-private key pair for proof of possession. 
+ """ + private_key = rsa.generate_private_key( + public_exponent=65537, + key_size=4096, + ) + private_key_pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode("utf-8") + + public_key_pem = ( + private_key.public_key() + .public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + .decode("utf-8") + ) + + if not oracledb.is_thin_mode(): + p_key = "".join( + line.strip() + for line in private_key_pem.splitlines() + if not ( + line.startswith("-----BEGIN") or line.startswith("-----END") + ) + ) + private_key_pem = p_key + + return {"private_key": private_key_pem, "public_key": public_key_pem} + + +def _generate_access_token(client, token_auth_config): + """ + Token generation logic used by authentication methods. + """ + key_pair = _get_key_pair() + scope = token_auth_config.get("scope", "urn:oracle:db::id::*") + + details = oci.identity_data_plane.models.GenerateScopedAccessTokenDetails( + scope=scope, public_key=key_pair["public_key"] + ) + response = client.generate_scoped_access_token( + generate_scoped_access_token_details=details + ) + + return (response.data.token, key_pair["private_key"]) + + +def _config_file_based_authentication(token_auth_config): + """ + Config file base authentication implementation: config parameters + are provided in a file. + """ + file_location = token_auth_config.get( + "file_location", oci.config.DEFAULT_LOCATION + ) + profile = token_auth_config.get("profile", oci.config.DEFAULT_PROFILE) + + # Load OCI config + config = oci.config.from_file(file_location, profile) + oci.config.validate_config(config) + + # Initialize service client with default config file + client = oci.identity_data_plane.DataplaneClient(config) + + return _generate_access_token(client, token_auth_config) + + +def _simple_authentication(token_auth_config): + """ + Simple authentication: config parameters are passed as parameters + """ + config = { + "user": token_auth_config["user"], + "key_file": token_auth_config["key_file"], + "fingerprint": token_auth_config["fingerprint"], + "tenancy": token_auth_config["tenancy"], + "region": token_auth_config["region"], + "profile": token_auth_config["profile"], + } + oci.config.validate_config(config) + + client = oci.identity_data_plane.DataplaneClient(config) + return _generate_access_token(client, token_auth_config) + + +def _instance_principal_authentication(token_auth_config): + """ + Instance principal authentication: for compute instances + with dynamic group access. + """ + signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner() + client = oci.identity_data_plane.DataplaneClient(config={}, signer=signer) + return _generate_access_token(client, token_auth_config) + + +def oci_token_hook(params: oracledb.ConnectParams): + """ + OCI-specific hook for generating a token. 
+ """ + if params.extra_auth_params is not None: + + def token_callback(refresh): + return generate_token(params.extra_auth_params, refresh) + + params.set(access_token=token_callback) + + +# Register the token hook for OCI +oracledb.register_params_hook(oci_token_hook) diff --git a/.venv/lib/python3.9/site-packages/oracledb/pool.py b/.venv/lib/python3.9/site-packages/oracledb/pool.py new file mode 100644 index 0000000..db9aa19 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/pool.py @@ -0,0 +1,1704 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# pool.py +# +# Contains the ConnectionPool class and the factory method create_pool() used +# for creating connection pools. +# +# *** NOTICE *** This file is generated from a template and should not be +# modified directly. See build_from_template.py in the utils subdirectory for +# more information. +# ----------------------------------------------------------------------------- + +import functools +import ssl +import threading +from typing import Callable, Type, Union, Any, Optional + +import oracledb + +from . import base_impl, thick_impl, thin_impl +from . import connection as connection_module +from . import driver_mode +from . import errors +from .base import BaseMetaClass +from .pool_params import PoolParams + + +class BaseConnectionPool(metaclass=BaseMetaClass): + _impl = None + + def __init__( + self, + dsn: Optional[str] = None, + *, + params: Optional[PoolParams] = None, + cache_name: Optional[str] = None, + **kwargs, + ) -> None: + """ + Constructor for creating a connection pool. 
+ """ + if params is None: + params_impl = base_impl.PoolParamsImpl() + elif not isinstance(params, PoolParams): + errors._raise_err(errors.ERR_INVALID_POOL_PARAMS) + else: + params_impl = params._impl.copy() + with driver_mode.get_manager() as mode_mgr: + thin = mode_mgr.thin + dsn = params_impl.process_args(dsn, kwargs, thin) + self._set_connection_type(params_impl.connectiontype) + self._cache_name = cache_name + if cache_name is not None: + named_pools.add_pool(cache_name, self) + try: + if issubclass( + self._connection_type, connection_module.AsyncConnection + ): + impl = thin_impl.AsyncThinPoolImpl(dsn, params_impl) + elif thin: + impl = thin_impl.ThinPoolImpl(dsn, params_impl) + else: + impl = thick_impl.ThickPoolImpl(dsn, params_impl) + self._impl = impl + self.session_callback = params_impl.session_callback + except: + if cache_name is not None: + del named_pools.pools[cache_name] + raise + + def _verify_open(self) -> None: + """ + Verifies that the pool is open and able to perform its work. + """ + if self._impl is None: + errors._raise_err(errors.ERR_POOL_NOT_OPEN) + + @property + def busy(self) -> int: + """ + This read-only attribute returns the number of connections currently + acquired. + """ + self._verify_open() + return self._impl.get_busy_count() + + @property + def dsn(self) -> str: + """ + This read-only attribute returns the TNS entry of the database to which + a connection has been established. + """ + self._verify_open() + return self._impl.dsn + + @property + def getmode(self) -> oracledb.PoolGetMode: + """ + This read-write attribute determines how connections are returned from + the pool. If :data:`~oracledb.POOL_GETMODE_FORCEGET` is specified, a + new connection will be returned even if there are no free connections + in the pool. :data:`~oracledb.POOL_GETMODE_NOWAIT` will raise an + exception if there are no free connections are available in the pool. + If :data:`~oracledb.POOL_GETMODE_WAIT` is specified and there are no + free connections in the pool, the caller will wait until a free + connection is available. :data:`~oracledb.POOL_GETMODE_TIMEDWAIT` uses + the value of :data:`~ConnectionPool.wait_timeout` to determine how long + the caller should wait for a connection to become available before + returning an error. + """ + self._verify_open() + return oracledb.PoolGetMode(self._impl.get_getmode()) + + @getmode.setter + def getmode(self, value: oracledb.PoolGetMode) -> None: + self._verify_open() + self._impl.set_getmode(value) + + @property + def homogeneous(self) -> bool: + """ + This read-only boolean attribute indicates whether the pool is + considered :ref:`homogeneous ` or not. If the pool is + not homogeneous, different authentication can be used for each + connection acquired from the pool. + """ + self._verify_open() + return self._impl.homogeneous + + @property + def increment(self) -> int: + """ + This read-only attribute returns the number of connections that will be + established when additional connections need to be created. + """ + self._verify_open() + return self._impl.increment + + @property + def max(self) -> int: + """ + This read-only attribute returns the maximum number of connections that + the pool can control. + """ + self._verify_open() + return self._impl.max + + @property + def max_lifetime_session(self) -> int: + """ + This read-write attribute is the maximum length of time (in seconds) + that a pooled connection may exist since first being created. A value + of *0* means there is no limit. 
Connections become candidates for + termination when they are acquired or released back to the pool, and + have existed for longer than ``max_lifetime_session`` seconds. + Connections that are in active use will not be closed. In + python-oracledb Thick mode, Oracle Client libraries 12.1 or later must + be used and, prior to Oracle Client 21, cleanup only occurs when the + pool is accessed. + """ + self._verify_open() + return self._impl.get_max_lifetime_session() + + @max_lifetime_session.setter + def max_lifetime_session(self, value: int) -> None: + self._verify_open() + self._impl.set_max_lifetime_session(value) + + @property + def max_sessions_per_shard(self) -> int: + """ + This read-write attribute returns the number of sessions that can be + created per shard in the pool. Setting this attribute greater than zero + specifies the maximum number of sessions in the pool that can be used + for any given shard in a sharded database. This lets connections in the + pool be balanced across the shards. A value of *0* will not set any + maximum number of sessions for each shard. This attribute is only + available in Oracle Client 18.3 and higher. + """ + self._verify_open() + return self._impl.get_max_sessions_per_shard() + + @max_sessions_per_shard.setter + def max_sessions_per_shard(self, value: int) -> None: + self._verify_open() + self._impl.set_max_sessions_per_shard(value) + + @property + def min(self) -> int: + """ + This read-only attribute returns the number of connections with which + the connection pool was created and the minimum number of connections + that will be controlled by the connection pool. + """ + self._verify_open() + return self._impl.min + + @property + def name(self) -> str: + """ + This read-only attribute returns the name assigned to the pool by + Oracle. + """ + self._verify_open() + return self._impl.name + + @property + def opened(self) -> int: + """ + This read-only attribute returns the number of connections currently + opened by the pool. + """ + self._verify_open() + return self._impl.get_open_count() + + @property + def ping_interval(self) -> int: + """ + This read-write integer attribute specifies the pool ping interval in + seconds. When a connection is acquired from the pool, a check is first + made to see how long it has been since the connection was put into the + pool. If this idle time exceeds ``ping_interval``, then a + :ref:`round-trip ` ping to the database is performed. If + the connection is unusable, it is discarded and a different connection + is selected to be returned by :meth:`acquire()`. Setting + ``ping_interval`` to a negative value disables pinging. Setting it to + *0* forces a ping for every :meth:`acquire()` and is not recommended. + """ + self._verify_open() + return self._impl.get_ping_interval() + + @ping_interval.setter + def ping_interval(self, value: int) -> None: + self._impl.set_ping_interval(value) + + @property + def soda_metadata_cache(self) -> bool: + """ + This read-write boolean attribute returns whether the SODA metadata + cache is enabled or not. Enabling the cache significantly improves the + performance of methods :meth:`SodaDatabase.createCollection()` (when + not specifying a value for the ``metadata`` parameter) and + :meth:`SodaDatabase.openCollection()`. Note that the cache can become + out of date if changes to the metadata of cached collections are made + externally. 
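Several of these attributes are writable at runtime, so an existing pool can be retuned without being recreated. A brief sketch, assuming a pool object such as the one created in the earlier sketch:

print(pool.opened, pool.busy, pool.min, pool.max)

pool.ping_interval = 30           # ping connections idle for more than 30 seconds on acquire
pool.max_lifetime_session = 3600  # recycle connections that have existed for over an hour
pool.soda_metadata_cache = True   # Thick mode: enable the SODA metadata cache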
+ """ + self._verify_open() + return self._impl.get_soda_metadata_cache() + + @soda_metadata_cache.setter + def soda_metadata_cache(self, value: bool) -> None: + if not isinstance(value, bool): + message = "soda_metadata_cache must be a boolean value." + raise TypeError(message) + self._verify_open() + self._impl.set_soda_metadata_cache(value) + + @property + def stmtcachesize(self) -> int: + """ + This read-write attribute specifies the size of the statement cache + that will be used for connections obtained from the pool. Once a + connection is created, that connection’s statement cache size can only + be changed by setting the ``stmtcachesize`` attribute on the connection + itself. + """ + self._verify_open() + return self._impl.get_stmt_cache_size() + + @stmtcachesize.setter + def stmtcachesize(self, value: int) -> None: + self._verify_open() + self._impl.set_stmt_cache_size(value) + + @property + def thin(self) -> bool: + """ + This read-only attribute returns a boolean indicating if + python-oracledb is in Thin mode (*True*) or Thick mode (*False*). + """ + self._verify_open() + return not isinstance(self._impl, thick_impl.ThickPoolImpl) + + @property + def timeout(self) -> int: + """ + This read-write attribute specifies the time (in seconds) after which + idle connections will be terminated in order to maintain an optimum + number of open connections. A value of *0* means that no idle + connections are terminated. Note that in python-oracledb Thick mode + with older Oracle Client Libraries, the termination only occurs when + the pool is accessed. + """ + self._verify_open() + return self._impl.get_timeout() + + @timeout.setter + def timeout(self, value: int) -> None: + self._verify_open() + self._impl.set_timeout(value) + + @property + def tnsentry(self) -> str: + """ + Deprecated. Use dsn instead. + """ + return self.dsn + + @property + def username(self) -> str: + """ + This read-only attribute returns the name of the user which established + the connection to the database. + """ + self._verify_open() + return self._impl.username + + @property + def wait_timeout(self) -> int: + """ + This read-write attribute specifies the time (in milliseconds) that the + caller should wait for a connection to become available in the pool + before returning with an error. This value is only used if the + ``getmode`` parameter to :meth:`oracledb.create_pool()` was the value + :data:`oracledb.POOL_GETMODE_TIMEDWAIT`. + """ + self._verify_open() + return self._impl.get_wait_timeout() + + @wait_timeout.setter + def wait_timeout(self, value: int) -> None: + self._verify_open() + self._impl.set_wait_timeout(value) + + +class ConnectionPool(BaseConnectionPool): + + def __del__(self): + if self._impl is not None: + self._impl.close(True) + self._impl = None + + def _set_connection_type(self, conn_class): + """ + Called internally when the pool is created to ensure that the correct + connection class is used for all connections created by the pool. 
+ """ + if conn_class is None: + conn_class = connection_module.Connection + elif not issubclass( + conn_class, connection_module.Connection + ) or issubclass(conn_class, connection_module.AsyncConnection): + errors._raise_err(errors.ERR_INVALID_CONN_CLASS) + self._connection_type = conn_class + + def acquire( + self, + user: Optional[str] = None, + password: Optional[str] = None, + cclass: Optional[str] = None, + purity: int = oracledb.PURITY_DEFAULT, + tag: Optional[str] = None, + matchanytag: bool = False, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + ) -> "connection_module.Connection": + """ + Acquires a connection from the session pool and returns a + :ref:`connection object `. + + If the pool is :ref:`homogeneous `, the ``user`` and + ``password`` parameters cannot be specified. If they are, an exception + will be raised. + + The ``cclass`` parameter, if specified, should be a string + corresponding to the connection class for :ref:`drcp`. + + The ``purity`` parameter is expected to be one of + :data:`~oracledb.PURITY_NEW`, :data:`~oracledb.PURITY_SELF`, or + :data:`~oracledb.PURITY_DEFAULT`. + + The ``tag`` parameter, if specified, is expected to be a string with + name=value pairs like "k1=v1;k2=v2" and will limit the connections that + can be returned from a connection pool unless the ``matchanytag`` + parameter is set to *True*. In that case, connections with the + specified tag will be preferred over others, but if no such connections + are available, then a connection with a different tag may be returned + instead. In any case, untagged connections will always be returned if + no connections with the specified tag are available. Connections are + tagged when they are :meth:`released ` back to + the pool. + + The ``shardingkey`` and ``supershardingkey`` parameters, if specified, + are expected to be a sequence of values which will be used to identify + the database shard to connect to. The key values can be strings, + numbers, bytes, or dates. See :ref:`connsharding`. + + When using the :ref:`connection pool cache `, calling + :meth:`oracledb.connect()` with a ``pool_alias`` parameter is the same + as calling ``pool.acquire()``. + """ + self._verify_open() + + return oracledb.connect( + conn_class=self._connection_type, + user=user, + password=password, + cclass=cclass, + purity=purity, + tag=tag, + matchanytag=matchanytag, + shardingkey=shardingkey, + supershardingkey=supershardingkey, + pool=self, + ) + + def close(self, force: bool = False) -> None: + """ + Closes the pool now, rather than when the last reference to it is + released, which makes it unusable for further work. + + If any connections have been acquired and not released back to the + pool, this method will fail unless the ``force`` parameter is set to + *True*. + """ + self._verify_open() + self._impl.close(force) + if self._cache_name is not None: + named_pools.remove_pool(self._cache_name) + self._impl = None + + def drop(self, connection: "connection_module.Connection") -> None: + """ + Drops the connection from the pool which is useful if the connection is + no longer usable (such as when the session is killed). 
+ """ + self._verify_open() + if not isinstance(connection, connection_module.Connection): + message = "connection must be an instance of oracledb.Connection" + raise TypeError(message) + connection._verify_connected() + self._impl.drop(connection._impl) + connection._impl = None + + def reconfigure( + self, + min: Optional[int] = None, + max: Optional[int] = None, + increment: Optional[int] = None, + getmode: Optional[int] = None, + timeout: Optional[int] = None, + wait_timeout: Optional[int] = None, + max_lifetime_session: Optional[int] = None, + max_sessions_per_shard: Optional[int] = None, + soda_metadata_cache: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + ping_interval: Optional[int] = None, + ) -> None: + """ + Reconfigures various parameters of a connection pool. The pool size can + be altered with ``reconfigure()`` by passing values for + :data:`~ConnectionPool.min`, :data:`~ConnectionPool.max` or + :data:`~ConnectionPool.increment`. The + :data:`~ConnectionPool.getmode`, :data:`~ConnectionPool.timeout`, + :data:`~ConnectionPool.wait_timeout`, + :data:`~ConnectionPool.max_lifetime_session`, + :data:`~ConnectionPool.max_sessions_per_shard`, + :data:`~ConnectionPool.soda_metadata_cache`, + :data:`~ConnectionPool.stmtcachesize` and + :data:`~ConnectionPool.ping_interval` attributes can be set directly or + with ``reconfigure()``. + + All parameters are optional. Unspecified parameters will leave those + pool attributes unchanged. The parameters are processed in two stages. + After any size change has been processed, reconfiguration on the other + parameters is done sequentially. If an error such as an invalid value + occurs when changing one attribute, then an exception will be generated + but any already changed attributes will retain their new values. + + During reconfiguration of a pool's size, the behavior of + :meth:`ConnectionPool.acquire()` depends on the ``getmode`` in effect + when ``acquire()`` is called: + + * With mode :data:`~oracledb.POOL_GETMODE_FORCEGET`, an ``acquire()`` + call will wait until the pool has been reconfigured. + + * With mode :data:`~oracledb.POOL_GETMODE_TIMEDWAIT`, an ``acquire()`` + call will try to acquire a connection in the time specified by + pool.wait_timeout and return an error if the time taken exceeds that + value. + + * With mode :data:`~oracledb.POOL_GETMODE_WAIT`, an ``acquire()`` call + will wait until after the pool has been reconfigured and a connection + is available. + + * With mode :data:`~oracledb.POOL_GETMODE_NOWAIT`, if the number of + busy connections is less than the pool size, ``acquire()`` will + return a new connection after pool reconfiguration is complete. + + Closing connections with :meth:`ConnectionPool.release()` or + :meth:`Connection.close()` will wait until any pool size + reconfiguration is complete. + + Closing the connection pool with :meth:`ConnectionPool.close()` will + wait until reconfiguration is complete. 
+ """ + + if min is None: + min = self.min + if max is None: + max = self.max + if increment is None: + increment = self.increment + if self.min != min or self.max != max or self.increment != increment: + self._impl.reconfigure(min, max, increment) + if getmode is not None: + self.getmode = getmode + if timeout is not None: + self.timeout = timeout + if wait_timeout is not None: + self.wait_timeout = wait_timeout + if max_lifetime_session is not None: + self.max_lifetime_session = max_lifetime_session + if max_sessions_per_shard is not None: + self.max_sessions_per_shard = max_sessions_per_shard + if soda_metadata_cache is not None: + self.soda_metadata_cache = soda_metadata_cache + if stmtcachesize is not None: + self.stmtcachesize = stmtcachesize + if ping_interval is not None: + self.ping_interval = ping_interval + + def release( + self, + connection: "connection_module.Connection", + tag: Optional[str] = None, + ) -> None: + """ + Releases the connection back to the pool now, rather than whenever + __del__ is called. The connection will be unusable from this point + forward; an Error exception will be raised if any operation is + attempted with the connection. Any cursors or LOBs created by the + connection will also be marked unusable and an Error exception will be + raised if any operation is attempted with them. + + Internally, references to the connection are held by cursor objects, + LOB objects, etc. Once all of these references are released, the + connection itself will be released back to the pool automatically. + Either control references to these related objects carefully or + explicitly release connections back to the pool in order to ensure + sufficient resources are available. + + If the tag is not *None*, it is expected to be a string with name=value + pairs like "k1=v1;k2=v2" and will override the value in the property + :attr:`Connection.tag`. If either :attr:`Connection.tag` or the tag + parameter are not *None*, the connection will be retagged when it is + released back to the pool. + """ + self._verify_open() + if not isinstance(connection, connection_module.Connection): + message = "connection must be an instance of oracledb.Connection" + raise TypeError(message) + connection._verify_connected() + if tag is not None: + connection.tag = tag + self._impl.return_connection(connection._impl) + connection._impl = None + + +def _pool_factory( + f: Callable[..., ConnectionPool], +) -> Callable[..., ConnectionPool]: + """ + Decorator which checks the validity of the supplied keyword parameters by + calling the original function (which does nothing), then creates and + returns an instance of the requested ConnectionPool class. The base + ConnectionPool class constructor does not check the validity of the + supplied keyword parameters. 
+ """ + + @functools.wraps(f) + def create_pool( + dsn: Optional[str] = None, + *, + pool_class: Type[ConnectionPool] = ConnectionPool, + pool_alias: Optional[str] = None, + params: Optional[PoolParams] = None, + **kwargs, + ) -> ConnectionPool: + f( + dsn=dsn, + pool_class=pool_class, + pool_alias=pool_alias, + params=params, + **kwargs, + ) + if not issubclass(pool_class, ConnectionPool): + errors._raise_err(errors.ERR_INVALID_POOL_CLASS) + return pool_class(dsn, params=params, cache_name=pool_alias, **kwargs) + + return create_pool + + +@_pool_factory +def create_pool( + dsn: Optional[str] = None, + *, + pool_class: Type[ConnectionPool] = ConnectionPool, + pool_alias: Optional[str] = None, + params: Optional[PoolParams] = None, + min: Optional[int] = None, + max: Optional[int] = None, + increment: Optional[int] = None, + connectiontype: Optional[Type["oracledb.Connection"]] = None, + getmode: Optional[oracledb.PoolGetMode] = None, + homogeneous: Optional[bool] = None, + timeout: Optional[int] = None, + wait_timeout: Optional[int] = None, + max_lifetime_session: Optional[int] = None, + session_callback: Optional[Callable] = None, + max_sessions_per_shard: Optional[int] = None, + soda_metadata_cache: Optional[bool] = None, + ping_interval: Optional[int] = None, + ping_timeout: Optional[int] = None, + user: Optional[str] = None, + proxy_user: Optional[str] = None, + password: Optional[str] = None, + newpassword: Optional[str] = None, + wallet_password: Optional[str] = None, + access_token: Optional[Union[str, tuple, Callable]] = None, + host: Optional[str] = None, + port: Optional[int] = None, + protocol: Optional[str] = None, + https_proxy: Optional[str] = None, + https_proxy_port: Optional[int] = None, + service_name: Optional[str] = None, + instance_name: Optional[str] = None, + sid: Optional[str] = None, + server_type: Optional[str] = None, + cclass: Optional[str] = None, + purity: Optional[oracledb.Purity] = None, + expire_time: Optional[int] = None, + retry_count: Optional[int] = None, + retry_delay: Optional[int] = None, + tcp_connect_timeout: Optional[float] = None, + ssl_server_dn_match: Optional[bool] = None, + ssl_server_cert_dn: Optional[str] = None, + wallet_location: Optional[str] = None, + events: Optional[bool] = None, + externalauth: Optional[bool] = None, + mode: Optional[oracledb.AuthMode] = None, + disable_oob: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + edition: Optional[str] = None, + tag: Optional[str] = None, + matchanytag: Optional[bool] = None, + config_dir: Optional[str] = None, + appcontext: Optional[list] = None, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + debug_jdwp: Optional[str] = None, + connection_id_prefix: Optional[str] = None, + ssl_context: Optional[Any] = None, + sdu: Optional[int] = None, + pool_boundary: Optional[str] = None, + use_tcp_fast_open: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + program: Optional[str] = None, + machine: Optional[str] = None, + terminal: Optional[str] = None, + osuser: Optional[str] = None, + driver_name: Optional[str] = None, + use_sni: Optional[bool] = None, + thick_mode_dsn_passthrough: Optional[bool] = None, + extra_auth_params: Optional[dict] = None, + pool_name: Optional[str] = None, + handle: Optional[int] = None, +) -> ConnectionPool: + """ + Creates a connection pool with the supplied parameters and returns it. 
+ + The ``dsn`` parameter (data source name) can be a string in the format + user/password@connect_string or can simply be the connect string (in + which case authentication credentials such as the username and password + need to be specified separately). See the documentation on connection + strings for more information. + + The ``pool_class`` parameter is expected to be ConnectionPool or a subclass + of ConnectionPool. + + The ``pool_alias`` parameter is expected to be a string representing the + name used to store and reference the pool in the python-oracledb connection + pool cache. If this parameter is not specified, then the pool will not be + added to the cache. The value of this parameter can be used with the + :meth:`oracledb.get_pool()` and :meth:`oracledb.connect()` methods to + access the pool. + + The ``params`` parameter is expected to be of type PoolParams and contains + parameters that are used to create the pool. See the documentation on + PoolParams for more information. If this parameter is not specified, the + additional keyword parameters will be used to create an instance of + PoolParams. If both the ``params`` parameter and additional keyword + parameters are specified, the values in the keyword parameters have + precedence. Note that if a ``dsn`` is also supplied, then in + python-oracledb Thin mode, the values of the parameters specified (if any) + within the dsn will override the values passed as additional keyword + parameters, which themselves override the values set in the ``params`` + parameter object. + + The following parameters are all optional. A brief description of each + parameter follows: + + - ``min``: the minimum number of connections the pool should contain + (default: 1) + + - ``max``: the maximum number of connections the pool should contain + (default: 2) + + - ``increment``: the number of connections that should be added to the pool + whenever the pool needs to grow + (default: 1) + + - ``connectiontype``: the class of the connection that should be returned + during calls to pool.acquire(). It must be oracledb.Connection or a + subclass of oracledb.Connection + (default: None) + + - ``getmode``: how pool.acquire() will behave. One of the constants + :data:`oracledb.POOL_GETMODE_WAIT`, :data:`oracledb.POOL_GETMODE_NOWAIT`, + :data:`oracledb.POOL_GETMODE_FORCEGET`, or + :data:`oracledb.POOL_GETMODE_TIMEDWAIT` + (default: :attr:`oracledb.POOL_GETMODE_WAIT`) + + - ``homogeneous``: a boolean indicating whether the connections in the pool + are homogeneous (same user) or heterogeneous (multiple users) + (default: True) + + - ``timeout``: the length of time (in seconds) that a connection may remain + idle in the pool before it is terminated. If it is 0 then connections are + never terminated + (default: 0) + + - ``wait_timeout``: the length of time (in milliseconds) that a caller + should wait when acquiring a connection from the pool with getmode set to + :data:`oracledb.POOL_GETMODE_TIMEDWAIT` + (default: 0) + + - ``max_lifetime_session``: the length of time (in seconds) that + connections can remain in the pool. 
If it is 0 then connections may + remain in the pool indefinitely + (default: 0) + + - ``session_callback``: a callable that is invoked when a connection is + returned from the pool for the first time, or when the connection tag + differs from the one requested + (default: None) + + - ``max_sessions_per_shard``: the maximum number of connections that may be + associated with a particular shard + (default: 0) + + - ``soda_metadata_cache``: a boolean indicating whether or not the SODA + metadata cache should be enabled + (default: False) + + - ``ping_interval``: the length of time (in seconds) after which an unused + connection in the pool will be a candidate for pinging when + pool.acquire() is called. If the ping to the database indicates the + connection is not alive a replacement connection will be returned by + pool.acquire(). If ping_interval is a negative value the ping + functionality will be disabled + (default: 60) + + - ``ping_timeout``: the maximum length of time (in milliseconds) to wait + for a connection in the pool to respond to an internal ping to the + database before being discarded and replaced during a call to acquire() + (default: 5000) + + - ``user``: the name of the database user to connect to + (default: None) + + - ``proxy_user``: the name of the proxy user to connect to. If this value + is not specified, it will be parsed out of user if user is in the form + "user[proxy_user]" + (default: None) + + - ``password``: the password for the database user + (default: None) + + - ``newpassword``: a new password for the database user. The new password + will take effect immediately upon a successful connection to the database + (default: None) + + - ``wallet_password``: the password to use to decrypt the wallet, if it is + encrypted. This is not the database password. For Oracle Autonomous + Database this is the password created when downloading the wallet. This + value is only used in python-oracledb Thin mode. + (default: None) + + - ``access_token``: a string, or a 2-tuple, or a callable. If it is a + string, it specifies an Entra ID OAuth2 token used for Open Authorization + (OAuth 2.0) token based authentication. If it is a 2-tuple, it specifies + the token and private key strings used for Oracle Cloud Infrastructure + (OCI) Identity and Access Management (IAM) token based authentication. If + it is a callable, it returns either a string or a 2-tuple used for OAuth + 2.0 or OCI IAM token based authentication and is useful when the pool + needs to expand and create new connections but the current authentication + token has expired + (default: None) + + - ``host``: the hostname or IP address of the machine hosting the database + or the database listener + (default: None) + + - ``port``: the port number on which the database listener is listening + (default: 1521) + + - ``protocol``: one of the strings "tcp" or "tcps" indicating whether to + use unencrypted network traffic or encrypted network traffic (TLS) + (default: "tcp") + + - ``https_proxy``: the hostname or IP address of a proxy host to use for + tunneling secure connections + (default: None) + + - ``https_proxy_port``: the port on which to communicate with the proxy + host + (default: 0) + + - ``service_name``: the service name of the database + (default: None) + + - ``instance_name``: the instance name of the database + (default: None) + + - ``sid``: the system identifier (SID) of the database. 
Note using a + service_name instead is recommended + (default: None) + + - ``server_type``: the type of server connection that should be + established. If specified, it should be one of strings "dedicated", + "shared" or "pooled" + (default: None) + + - ``cclass``: the connection class to use for Database Resident Connection + Pooling (DRCP) + (default: None) + + - ``purity``: the connection purity to use for Database Resident Connection + Pooling (DRCP) + (default: :attr:`oracledb.PURITY_DEFAULT`) + + - ``expire_time``: the number of minutes between the sending of keepalive + probes. If this parameter is set to a value greater than zero it enables + keepalive + (default: 0) + + - ``retry_count``: the number of times that initial connection + establishment should be retried before the connection attempt is + terminated + (default: 0) + + - ``retry_delay``: the number of seconds to wait before retrying to + establish a connection + (default: 1) + + - ``tcp_connect_timeout``: a float indicating the maximum number of seconds + to wait when establishing a connection to the database host + (default: 20.0) + + - ``ssl_server_dn_match``: a boolean indicating whether the server + certificate distinguished name (DN) should be matched in addition to the + regular certificate verification that is performed. Note that if the + ssl_server_cert_dn parameter is not privided, host name matching is + performed instead + (default: True) + + - ``ssl_server_cert_dn``: the distinguished name (DN) which should be + matched with the server. This value is ignored if the ssl_server_dn_match + parameter is not set to the value True. If specified this value is used + for any verfication. Otherwise the hostname will be used + (default: None) + + - ``wallet_location``: the directory where the wallet can be found. In + python-oracledb Thin mode this must be the directory containing the PEM- + encoded wallet file ewallet.pem. In python-oracledb Thick mode this must + be the directory containing the file cwallet.sso + (default: None) + + - ``events``: a boolean specifying whether events mode should be enabled. + This value is only used in python-oracledb Thick mode and is needed for + continuous query notification and high availability event notifications + (default: False) + + - ``externalauth``: a boolean indicating whether to use external + authentication + (default: False) + + - ``mode``: the authorization mode to use. One of the constants + :data:`oracledb.AUTH_MODE_DEFAULT`, :data:`oracledb.AUTH_MODE_PRELIM`, + :data:`oracledb.AUTH_MODE_SYSASM`, :data:`oracledb.AUTH_MODE_SYSBKP`, + :data:`oracledb.AUTH_MODE_SYSDBA`, :data:`oracledb.AUTH_MODE_SYSDGD`, + :data:`oracledb.AUTH_MODE_SYSKMT`, :data:`oracledb.AUTH_MODE_SYSOPER`, or + :data:`oracledb.AUTH_MODE_SYSRAC` + (default: :attr:`oracledb.AUTH_MODE_DEFAULT`) + + - ``disable_oob``: a boolean indicating whether out-of-band breaks should + be disabled. This value is only used in python-oracledb Thin mode. It has + no effect on Windows which does not support this functionality + (default: False) + + - ``stmtcachesize``: the size of the statement cache + (default: :attr:`oracledb.defaults.stmtcachesize + `) + + - ``edition``: edition to use for the connection. This parameter cannot be + used simultaneously with the cclass parameter + (default: None) + + - ``tag``: identifies the type of connection that should be returned from a + pool. 
This value is only used in python-oracledb Thick mode + (default: None) + + - ``matchanytag``: a boolean specifying whether any tag can be used when + acquiring a connection from the pool. This value is only used in python- + oracledb Thick mode + (default: False) + + - ``config_dir``: a directory in which the optional tnsnames.ora + configuration file is located. This value is only used in python-oracledb + Thin mode. For python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()` + (default: :attr:`oracledb.defaults.config_dir + `) + + - ``appcontext``: application context used by the connection. It should be + a list of 3-tuples (namespace, name, value) and each entry in the tuple + should be a string + (default: None) + + - ``shardingkey``: a list of strings, numbers, bytes or dates that identify + the database shard to connect to. This value is only used in python- + oracledb Thick mode + (default: None) + + - ``supershardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + (default: None) + + - ``debug_jdwp``: a string with the format "host=;port=" that + specifies the host and port of the PL/SQL debugger. This value is only + used in python-oracledb Thin mode. For python-oracledb Thick mode set + the ORA_DEBUG_JDWP environment variable + (default: None) + + - ``connection_id_prefix``: an application specific prefix that is added to + the connection identifier used for tracing + (default: None) + + - ``ssl_context``: an SSLContext object used for connecting to the database + using TLS. This SSL context will be modified to include the private key + or any certificates found in a separately supplied wallet. This parameter + should only be specified if the default SSLContext object cannot be used + (default: None) + + - ``sdu``: the requested size of the Session Data Unit (SDU), in bytes. The + value tunes internal buffers used for communication to the database. + Bigger values can increase throughput for large queries or bulk data + loads, but at the cost of higher memory use. The SDU size that will + actually be used is negotiated down to the lower of this value and the + database network SDU configuration value + (default: 8192) + + - ``pool_boundary``: one of the values "statement" or "transaction" + indicating when pooled DRCP connections can be returned to the pool. This + requires the use of DRCP with Oracle Database 23.4 or higher + (default: None) + + - ``use_tcp_fast_open``: a boolean indicating whether to use TCP fast open. + This is an Oracle Autonomous Database Serverless (ADB-S) specific + property for clients connecting from within OCI Cloud network. 
Please + refer to the ADB-S documentation for more information + (default: False) + + - ``ssl_version``: one of the values ssl.TLSVersion.TLSv1_2 or + ssl.TLSVersion.TLSv1_3 indicating which TLS version to use + (default: None) + + - ``program``: a string recorded by Oracle Database as the program from + which the connection originates + (default: :attr:`oracledb.defaults.program + `) + + - ``machine``: a string recorded by Oracle Database as the name of the + machine from which the connection originates + (default: :attr:`oracledb.defaults.machine + `) + + - ``terminal``: a string recorded by Oracle Database as the terminal + identifier from which the connection originates + (default: :attr:`oracledb.defaults.terminal + `) + + - ``osuser``: a string recorded by Oracle Database as the operating system + user who originated the connection + (default: :attr:`oracledb.defaults.osuser + `) + + - ``driver_name``: a string recorded by Oracle Database as the name of the + driver which originated the connection + (default: :attr:`oracledb.defaults.driver_name + `) + + - ``use_sni``: a boolean indicating whether to use the TLS SNI extension to + bypass the second TLS neogiation that would otherwise be required + (default: False) + + - ``thick_mode_dsn_passthrough``: a boolean indicating whether to pass the + connect string to the Oracle Client libraries unchanged without parsing + by the driver. Setting this to False makes python-oracledb Thick and Thin + mode applications behave similarly regarding connection string parameter + handling and locating any optional tnsnames.ora configuration file + (default: :attr:`oracledb.defaults.thick_mode_dsn_passthrough + `) + + - ``extra_auth_params``: a dictionary containing configuration parameters + necessary for Oracle Database authentication using plugins, such as the + Azure and OCI cloud-native authentication plugins + (default: None) + + - ``pool_name``: the name of the DRCP pool when using multi-pool DRCP with + Oracle Database 23.4, or higher + (default: None) + + - ``handle``: an integer representing a pointer to a valid service context + handle. This value is only used in python-oracledb Thick mode. It should + be used with extreme caution + (default: 0) + """ + pass + + +class AsyncConnectionPool(BaseConnectionPool): + + def _set_connection_type(self, conn_class): + """ + Called internally when the pool is created to ensure that the correct + connection class is used for all connections created by the pool. + """ + if conn_class is None: + conn_class = connection_module.AsyncConnection + elif not issubclass(conn_class, connection_module.AsyncConnection): + errors._raise_err(errors.ERR_INVALID_CONN_CLASS) + self._connection_type = conn_class + + def acquire( + self, + user: Optional[str] = None, + password: Optional[str] = None, + cclass: Optional[str] = None, + purity: int = oracledb.PURITY_DEFAULT, + tag: Optional[str] = None, + matchanytag: bool = False, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + ) -> "connection_module.AsyncConnection": + """ + Acquires a connection from the pool and returns an :ref:`asynchronous + connection object `. + + If the pool is :ref:`homogeneous `, the ``user`` and + ``password`` parameters cannot be specified. If they are, an exception + will be raised. + + The ``cclass`` parameter, if specified, should be a string + corresponding to the connection class for :ref:`drcp`. 
+ + The ``purity`` parameter is expected to be one of + :data:`~oracledb.PURITY_NEW`, :data:`~oracledb.PURITY_SELF`, or + :data:`~oracledb.PURITY_DEFAULT`. + + The ``tag``, ``matchanytag``, ``shardingkey``, and ``supershardingkey`` + parameters are ignored in python-oracledb Thin mode. + """ + self._verify_open() + + return oracledb.connect_async( + conn_class=self._connection_type, + user=user, + password=password, + cclass=cclass, + purity=purity, + tag=tag, + matchanytag=matchanytag, + shardingkey=shardingkey, + supershardingkey=supershardingkey, + pool=self, + ) + + async def close(self, force: bool = False) -> None: + """ + Closes the pool now, rather than when the last reference to it is + released, which makes it unusable for further work. + + If any connections have been acquired and not released back to the + pool, this method will fail unless the ``force`` parameter is set to + *True*. + """ + self._verify_open() + await self._impl.close(force) + if self._cache_name is not None: + named_pools.remove_pool(self._cache_name) + self._impl = None + + async def drop(self, connection: "connection_module.Connection") -> None: + """ + Drops the connection from the pool which is useful if the connection is + no longer usable (such as when the session is killed). + """ + self._verify_open() + if not isinstance(connection, connection_module.AsyncConnection): + message = ( + "connection must be an instance of oracledb.AsyncConnection" + ) + raise TypeError(message) + connection._verify_connected() + await self._impl.drop(connection._impl) + connection._impl = None + + async def release( + self, + connection: "connection_module.AsyncConnection", + tag: Optional[str] = None, + ) -> None: + """ + Releases the connection back to the pool now. The connection will be + unusable from this point forward. An Error exception will be raised if + any operation is attempted with the connection. Any cursors or LOBs + created by the connection will also be marked unusable and an Error + exception will be raised if any operation is attempted with them. + + The ``tag`` parameter is ignored in python-oracledb Thin mode. + """ + self._verify_open() + if not isinstance(connection, connection_module.AsyncConnection): + message = ( + "connection must be an instance of oracledb.AsyncConnection" + ) + raise TypeError(message) + if tag is not None: + connection.tag = tag + await self._impl.return_connection(connection._impl) + connection._impl = None + + +def _async_pool_factory( + f: Callable[..., AsyncConnectionPool], +) -> Callable[..., AsyncConnectionPool]: + """ + Decorator which checks the validity of the supplied keyword parameters by + calling the original function (which does nothing), then creates and + returns an instance of the requested ConnectionPool class. The base + ConnectionPool class constructor does not check the validity of the + supplied keyword parameters. 
+ """ + + @functools.wraps(f) + def create_pool_async( + dsn: Optional[str] = None, + *, + pool_class: Type[ConnectionPool] = AsyncConnectionPool, + pool_alias: Optional[str] = None, + params: Optional[PoolParams] = None, + **kwargs, + ) -> AsyncConnectionPool: + f( + dsn=dsn, + pool_class=pool_class, + pool_alias=pool_alias, + params=params, + **kwargs, + ) + oracledb.enable_thin_mode() + if not issubclass(pool_class, AsyncConnectionPool): + errors._raise_err(errors.ERR_INVALID_POOL_CLASS) + return pool_class(dsn, params=params, cache_name=pool_alias, **kwargs) + + return create_pool_async + + +@_async_pool_factory +def create_pool_async( + dsn: Optional[str] = None, + *, + pool_class: Type[ConnectionPool] = AsyncConnectionPool, + pool_alias: Optional[str] = None, + params: Optional[PoolParams] = None, + min: Optional[int] = None, + max: Optional[int] = None, + increment: Optional[int] = None, + connectiontype: Optional[Type["oracledb.AsyncConnection"]] = None, + getmode: Optional[oracledb.PoolGetMode] = None, + homogeneous: Optional[bool] = None, + timeout: Optional[int] = None, + wait_timeout: Optional[int] = None, + max_lifetime_session: Optional[int] = None, + session_callback: Optional[Callable] = None, + max_sessions_per_shard: Optional[int] = None, + soda_metadata_cache: Optional[bool] = None, + ping_interval: Optional[int] = None, + ping_timeout: Optional[int] = None, + user: Optional[str] = None, + proxy_user: Optional[str] = None, + password: Optional[str] = None, + newpassword: Optional[str] = None, + wallet_password: Optional[str] = None, + access_token: Optional[Union[str, tuple, Callable]] = None, + host: Optional[str] = None, + port: Optional[int] = None, + protocol: Optional[str] = None, + https_proxy: Optional[str] = None, + https_proxy_port: Optional[int] = None, + service_name: Optional[str] = None, + instance_name: Optional[str] = None, + sid: Optional[str] = None, + server_type: Optional[str] = None, + cclass: Optional[str] = None, + purity: Optional[oracledb.Purity] = None, + expire_time: Optional[int] = None, + retry_count: Optional[int] = None, + retry_delay: Optional[int] = None, + tcp_connect_timeout: Optional[float] = None, + ssl_server_dn_match: Optional[bool] = None, + ssl_server_cert_dn: Optional[str] = None, + wallet_location: Optional[str] = None, + events: Optional[bool] = None, + externalauth: Optional[bool] = None, + mode: Optional[oracledb.AuthMode] = None, + disable_oob: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + edition: Optional[str] = None, + tag: Optional[str] = None, + matchanytag: Optional[bool] = None, + config_dir: Optional[str] = None, + appcontext: Optional[list] = None, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + debug_jdwp: Optional[str] = None, + connection_id_prefix: Optional[str] = None, + ssl_context: Optional[Any] = None, + sdu: Optional[int] = None, + pool_boundary: Optional[str] = None, + use_tcp_fast_open: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + program: Optional[str] = None, + machine: Optional[str] = None, + terminal: Optional[str] = None, + osuser: Optional[str] = None, + driver_name: Optional[str] = None, + use_sni: Optional[bool] = None, + thick_mode_dsn_passthrough: Optional[bool] = None, + extra_auth_params: Optional[dict] = None, + pool_name: Optional[str] = None, + handle: Optional[int] = None, +) -> AsyncConnectionPool: + """ + Creates a connection pool with the supplied parameters and returns it. 
+
+ The ``dsn`` parameter (data source name) can be a string in the format
+ user/password@connect_string or can simply be the connect string (in
+ which case authentication credentials such as the username and password
+ need to be specified separately). See the documentation on connection
+ strings for more information.
+
+ The ``pool_class`` parameter is expected to be AsyncConnectionPool or a
+ subclass of AsyncConnectionPool.
+
+ The ``pool_alias`` parameter is expected to be a string representing the
+ name used to store and reference the pool in the python-oracledb connection
+ pool cache. If this parameter is not specified, then the pool will not be
+ added to the cache. The value of this parameter can be used with the
+ :meth:`oracledb.get_pool()` and :meth:`oracledb.connect_async()` methods to
+ access the pool.
+
+ The ``params`` parameter is expected to be of type PoolParams and contains
+ parameters that are used to create the pool. See the documentation on
+ PoolParams for more information. If this parameter is not specified, the
+ additional keyword parameters will be used to create an instance of
+ PoolParams. If both the ``params`` parameter and additional keyword
+ parameters are specified, the values in the keyword parameters have
+ precedence. Note that if a ``dsn`` is also supplied, then in
+ python-oracledb Thin mode, the values of the parameters specified (if any)
+ within the ``dsn`` will override the values passed as additional keyword
+ parameters, which themselves override the values set in the ``params``
+ parameter object.
+
+ The following parameters are all optional. A brief description of each
+ parameter follows:
+
+ - ``min``: the minimum number of connections the pool should contain
+ (default: 1)
+
+ - ``max``: the maximum number of connections the pool should contain
+ (default: 2)
+
+ - ``increment``: the number of connections that should be added to the pool
+ whenever the pool needs to grow
+ (default: 1)
+
+ - ``connectiontype``: the class of the connection that should be returned
+ during calls to pool.acquire(). It must be oracledb.AsyncConnection or a
+ subclass of oracledb.AsyncConnection
+ (default: None)
+
+ - ``getmode``: how pool.acquire() will behave. One of the constants
+ :data:`oracledb.POOL_GETMODE_WAIT`, :data:`oracledb.POOL_GETMODE_NOWAIT`,
+ :data:`oracledb.POOL_GETMODE_FORCEGET`, or
+ :data:`oracledb.POOL_GETMODE_TIMEDWAIT`
+ (default: :attr:`oracledb.POOL_GETMODE_WAIT`)
+
+ - ``homogeneous``: a boolean indicating whether the connections in the pool
+ are homogeneous (same user) or heterogeneous (multiple users)
+ (default: True)
+
+ - ``timeout``: the length of time (in seconds) that a connection may remain
+ idle in the pool before it is terminated. If it is 0 then connections are
+ never terminated
+ (default: 0)
+
+ - ``wait_timeout``: the length of time (in milliseconds) that a caller
+ should wait when acquiring a connection from the pool with getmode set to
+ :data:`oracledb.POOL_GETMODE_TIMEDWAIT`
+ (default: 0)
+
+ - ``max_lifetime_session``: the length of time (in seconds) that
+ connections can remain in the pool.
If it is 0 then connections may + remain in the pool indefinitely + (default: 0) + + - ``session_callback``: a callable that is invoked when a connection is + returned from the pool for the first time, or when the connection tag + differs from the one requested + (default: None) + + - ``max_sessions_per_shard``: the maximum number of connections that may be + associated with a particular shard + (default: 0) + + - ``soda_metadata_cache``: a boolean indicating whether or not the SODA + metadata cache should be enabled + (default: False) + + - ``ping_interval``: the length of time (in seconds) after which an unused + connection in the pool will be a candidate for pinging when + pool.acquire() is called. If the ping to the database indicates the + connection is not alive a replacement connection will be returned by + pool.acquire(). If ping_interval is a negative value the ping + functionality will be disabled + (default: 60) + + - ``ping_timeout``: the maximum length of time (in milliseconds) to wait + for a connection in the pool to respond to an internal ping to the + database before being discarded and replaced during a call to acquire() + (default: 5000) + + - ``user``: the name of the database user to connect to + (default: None) + + - ``proxy_user``: the name of the proxy user to connect to. If this value + is not specified, it will be parsed out of user if user is in the form + "user[proxy_user]" + (default: None) + + - ``password``: the password for the database user + (default: None) + + - ``newpassword``: a new password for the database user. The new password + will take effect immediately upon a successful connection to the database + (default: None) + + - ``wallet_password``: the password to use to decrypt the wallet, if it is + encrypted. This is not the database password. For Oracle Autonomous + Database this is the password created when downloading the wallet. This + value is only used in python-oracledb Thin mode. + (default: None) + + - ``access_token``: a string, or a 2-tuple, or a callable. If it is a + string, it specifies an Entra ID OAuth2 token used for Open Authorization + (OAuth 2.0) token based authentication. If it is a 2-tuple, it specifies + the token and private key strings used for Oracle Cloud Infrastructure + (OCI) Identity and Access Management (IAM) token based authentication. If + it is a callable, it returns either a string or a 2-tuple used for OAuth + 2.0 or OCI IAM token based authentication and is useful when the pool + needs to expand and create new connections but the current authentication + token has expired + (default: None) + + - ``host``: the hostname or IP address of the machine hosting the database + or the database listener + (default: None) + + - ``port``: the port number on which the database listener is listening + (default: 1521) + + - ``protocol``: one of the strings "tcp" or "tcps" indicating whether to + use unencrypted network traffic or encrypted network traffic (TLS) + (default: "tcp") + + - ``https_proxy``: the hostname or IP address of a proxy host to use for + tunneling secure connections + (default: None) + + - ``https_proxy_port``: the port on which to communicate with the proxy + host + (default: 0) + + - ``service_name``: the service name of the database + (default: None) + + - ``instance_name``: the instance name of the database + (default: None) + + - ``sid``: the system identifier (SID) of the database. 
Note using a
+ service_name instead is recommended
+ (default: None)
+
+ - ``server_type``: the type of server connection that should be
+ established. If specified, it should be one of strings "dedicated",
+ "shared" or "pooled"
+ (default: None)
+
+ - ``cclass``: the connection class to use for Database Resident Connection
+ Pooling (DRCP)
+ (default: None)
+
+ - ``purity``: the connection purity to use for Database Resident Connection
+ Pooling (DRCP)
+ (default: :attr:`oracledb.PURITY_DEFAULT`)
+
+ - ``expire_time``: the number of minutes between the sending of keepalive
+ probes. If this parameter is set to a value greater than zero it enables
+ keepalive
+ (default: 0)
+
+ - ``retry_count``: the number of times that initial connection
+ establishment should be retried before the connection attempt is
+ terminated
+ (default: 0)
+
+ - ``retry_delay``: the number of seconds to wait before retrying to
+ establish a connection
+ (default: 1)
+
+ - ``tcp_connect_timeout``: a float indicating the maximum number of seconds
+ to wait when establishing a connection to the database host
+ (default: 20.0)
+
+ - ``ssl_server_dn_match``: a boolean indicating whether the server
+ certificate distinguished name (DN) should be matched in addition to the
+ regular certificate verification that is performed. Note that if the
+ ssl_server_cert_dn parameter is not provided, host name matching is
+ performed instead
+ (default: True)
+
+ - ``ssl_server_cert_dn``: the distinguished name (DN) which should be
+ matched with the server. This value is ignored if the ssl_server_dn_match
+ parameter is not set to the value True. If specified, this value is used
+ for any verification. Otherwise the hostname will be used
+ (default: None)
+
+ - ``wallet_location``: the directory where the wallet can be found. In
+ python-oracledb Thin mode this must be the directory containing the PEM-
+ encoded wallet file ewallet.pem. In python-oracledb Thick mode this must
+ be the directory containing the file cwallet.sso
+ (default: None)
+
+ - ``events``: a boolean specifying whether events mode should be enabled.
+ This value is only used in python-oracledb Thick mode and is needed for
+ continuous query notification and high availability event notifications
+ (default: False)
+
+ - ``externalauth``: a boolean indicating whether to use external
+ authentication
+ (default: False)
+
+ - ``mode``: the authorization mode to use. One of the constants
+ :data:`oracledb.AUTH_MODE_DEFAULT`, :data:`oracledb.AUTH_MODE_PRELIM`,
+ :data:`oracledb.AUTH_MODE_SYSASM`, :data:`oracledb.AUTH_MODE_SYSBKP`,
+ :data:`oracledb.AUTH_MODE_SYSDBA`, :data:`oracledb.AUTH_MODE_SYSDGD`,
+ :data:`oracledb.AUTH_MODE_SYSKMT`, :data:`oracledb.AUTH_MODE_SYSOPER`, or
+ :data:`oracledb.AUTH_MODE_SYSRAC`
+ (default: :attr:`oracledb.AUTH_MODE_DEFAULT`)
+
+ - ``disable_oob``: a boolean indicating whether out-of-band breaks should
+ be disabled. This value is only used in python-oracledb Thin mode. It has
+ no effect on Windows which does not support this functionality
+ (default: False)
+
+ - ``stmtcachesize``: the size of the statement cache
+ (default: :attr:`oracledb.defaults.stmtcachesize`)
+
+ - ``edition``: edition to use for the connection. This parameter cannot be
+ used simultaneously with the cclass parameter
+ (default: None)
+
+ - ``tag``: identifies the type of connection that should be returned from a
+ pool.
This value is only used in python-oracledb Thick mode + (default: None) + + - ``matchanytag``: a boolean specifying whether any tag can be used when + acquiring a connection from the pool. This value is only used in python- + oracledb Thick mode + (default: False) + + - ``config_dir``: a directory in which the optional tnsnames.ora + configuration file is located. This value is only used in python-oracledb + Thin mode. For python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()` + (default: :attr:`oracledb.defaults.config_dir + `) + + - ``appcontext``: application context used by the connection. It should be + a list of 3-tuples (namespace, name, value) and each entry in the tuple + should be a string + (default: None) + + - ``shardingkey``: a list of strings, numbers, bytes or dates that identify + the database shard to connect to. This value is only used in python- + oracledb Thick mode + (default: None) + + - ``supershardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + (default: None) + + - ``debug_jdwp``: a string with the format "host=;port=" that + specifies the host and port of the PL/SQL debugger. This value is only + used in python-oracledb Thin mode. For python-oracledb Thick mode set + the ORA_DEBUG_JDWP environment variable + (default: None) + + - ``connection_id_prefix``: an application specific prefix that is added to + the connection identifier used for tracing + (default: None) + + - ``ssl_context``: an SSLContext object used for connecting to the database + using TLS. This SSL context will be modified to include the private key + or any certificates found in a separately supplied wallet. This parameter + should only be specified if the default SSLContext object cannot be used + (default: None) + + - ``sdu``: the requested size of the Session Data Unit (SDU), in bytes. The + value tunes internal buffers used for communication to the database. + Bigger values can increase throughput for large queries or bulk data + loads, but at the cost of higher memory use. The SDU size that will + actually be used is negotiated down to the lower of this value and the + database network SDU configuration value + (default: 8192) + + - ``pool_boundary``: one of the values "statement" or "transaction" + indicating when pooled DRCP connections can be returned to the pool. This + requires the use of DRCP with Oracle Database 23.4 or higher + (default: None) + + - ``use_tcp_fast_open``: a boolean indicating whether to use TCP fast open. + This is an Oracle Autonomous Database Serverless (ADB-S) specific + property for clients connecting from within OCI Cloud network. 
Please + refer to the ADB-S documentation for more information + (default: False) + + - ``ssl_version``: one of the values ssl.TLSVersion.TLSv1_2 or + ssl.TLSVersion.TLSv1_3 indicating which TLS version to use + (default: None) + + - ``program``: a string recorded by Oracle Database as the program from + which the connection originates + (default: :attr:`oracledb.defaults.program + `) + + - ``machine``: a string recorded by Oracle Database as the name of the + machine from which the connection originates + (default: :attr:`oracledb.defaults.machine + `) + + - ``terminal``: a string recorded by Oracle Database as the terminal + identifier from which the connection originates + (default: :attr:`oracledb.defaults.terminal + `) + + - ``osuser``: a string recorded by Oracle Database as the operating system + user who originated the connection + (default: :attr:`oracledb.defaults.osuser + `) + + - ``driver_name``: a string recorded by Oracle Database as the name of the + driver which originated the connection + (default: :attr:`oracledb.defaults.driver_name + `) + + - ``use_sni``: a boolean indicating whether to use the TLS SNI extension to + bypass the second TLS neogiation that would otherwise be required + (default: False) + + - ``thick_mode_dsn_passthrough``: a boolean indicating whether to pass the + connect string to the Oracle Client libraries unchanged without parsing + by the driver. Setting this to False makes python-oracledb Thick and Thin + mode applications behave similarly regarding connection string parameter + handling and locating any optional tnsnames.ora configuration file + (default: :attr:`oracledb.defaults.thick_mode_dsn_passthrough + `) + + - ``extra_auth_params``: a dictionary containing configuration parameters + necessary for Oracle Database authentication using plugins, such as the + Azure and OCI cloud-native authentication plugins + (default: None) + + - ``pool_name``: the name of the DRCP pool when using multi-pool DRCP with + Oracle Database 23.4, or higher + (default: None) + + - ``handle``: an integer representing a pointer to a valid service context + handle. This value is only used in python-oracledb Thick mode. It should + be used with extreme caution + (default: 0) + """ + pass + + +class NamedPools: + + def __init__(self): + self.lock = threading.Lock() + self.pools = {} + + def add_pool(self, alias, pool): + """ + Adds a pool to the cache. An exception is raised if a pool is already + cached with the given alias. + """ + if not isinstance(alias, str): + raise TypeError("pool_alias must be a string") + with self.lock: + if alias in self.pools: + errors._raise_err(errors.ERR_NAMED_POOL_EXISTS, alias=alias) + self.pools[alias] = pool + + def remove_pool(self, alias): + """ + Removes the pool with the given alias from the cache. An exception is + raised if there is no pool cached with the given alias. + """ + with self.lock: + if alias not in self.pools: + errors._raise_err(errors.ERR_NAMED_POOL_MISSING, alias=alias) + del self.pools[alias] + + +named_pools = NamedPools() + + +def get_pool( + pool_alias: str, +) -> Union[ConnectionPool, AsyncConnectionPool, None]: + """ + Returns a :ref:`ConnectionPool object ` from the python-oracledb + pool cache. The pool must have been previously created by passing the same + ``pool_alias`` value to :meth:`oracledb.create_pool()` or + :meth:`oracledb.create_pool_async()`. + + If a pool with the given name does not exist, *None* is returned. 
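+
+ For example (an illustrative sketch; the alias and connection details are
+ placeholders):
+
+     oracledb.create_pool_async(
+         user="my_user",
+         password=my_password,
+         dsn="dbhost.example.com/my_service",
+         pool_alias="my_pool",
+     )
+     pool = oracledb.get_pool("my_pool")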
+ """ + return named_pools.pools.get(pool_alias) diff --git a/.venv/lib/python3.9/site-packages/oracledb/pool_params.py b/.venv/lib/python3.9/site-packages/oracledb/pool_params.py new file mode 100644 index 0000000..f35cefa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/pool_params.py @@ -0,0 +1,1017 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2022, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# pool_params.py +# +# Contains the PoolParams class used for managing the parameters required to +# create a connection pool. +# +# *** NOTICE *** This file is generated from a template and should not be +# modified directly. See build_from_template.py in the utils subdirectory for +# more information. +# ----------------------------------------------------------------------------- + +import ssl +from typing import Callable, Type, Union, Any, Optional + +import oracledb + +from . import base_impl, utils +from .connect_params import ConnectParams + + +class PoolParams(ConnectParams): + """ + Contains all parameters used for creating a connection pool. 
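+
+ A brief illustrative example (the pool settings and connection details
+ shown are arbitrary placeholders):
+
+     params = oracledb.PoolParams(min=1, max=4, increment=1,
+                                  getmode=oracledb.POOL_GETMODE_WAIT)
+     pool = oracledb.create_pool(user="my_user", password=my_password,
+                                 dsn="dbhost.example.com/my_service",
+                                 params=params)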
+ """ + + __module__ = oracledb.__name__ + __slots__ = ["_impl"] + _impl_class = base_impl.PoolParamsImpl + + @utils.params_initer + def __init__( + self, + *, + min: Optional[int] = None, + max: Optional[int] = None, + increment: Optional[int] = None, + connectiontype: Optional[Type["oracledb.Connection"]] = None, + getmode: Optional[oracledb.PoolGetMode] = None, + homogeneous: Optional[bool] = None, + timeout: Optional[int] = None, + wait_timeout: Optional[int] = None, + max_lifetime_session: Optional[int] = None, + session_callback: Optional[Callable] = None, + max_sessions_per_shard: Optional[int] = None, + soda_metadata_cache: Optional[bool] = None, + ping_interval: Optional[int] = None, + ping_timeout: Optional[int] = None, + user: Optional[str] = None, + proxy_user: Optional[str] = None, + password: Optional[str] = None, + newpassword: Optional[str] = None, + wallet_password: Optional[str] = None, + access_token: Optional[Union[str, tuple, Callable]] = None, + host: Optional[str] = None, + port: Optional[int] = None, + protocol: Optional[str] = None, + https_proxy: Optional[str] = None, + https_proxy_port: Optional[int] = None, + service_name: Optional[str] = None, + instance_name: Optional[str] = None, + sid: Optional[str] = None, + server_type: Optional[str] = None, + cclass: Optional[str] = None, + purity: Optional[oracledb.Purity] = None, + expire_time: Optional[int] = None, + retry_count: Optional[int] = None, + retry_delay: Optional[int] = None, + tcp_connect_timeout: Optional[float] = None, + ssl_server_dn_match: Optional[bool] = None, + ssl_server_cert_dn: Optional[str] = None, + wallet_location: Optional[str] = None, + events: Optional[bool] = None, + externalauth: Optional[bool] = None, + mode: Optional[oracledb.AuthMode] = None, + disable_oob: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + edition: Optional[str] = None, + tag: Optional[str] = None, + matchanytag: Optional[bool] = None, + config_dir: Optional[str] = None, + appcontext: Optional[list] = None, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + debug_jdwp: Optional[str] = None, + connection_id_prefix: Optional[str] = None, + ssl_context: Optional[Any] = None, + sdu: Optional[int] = None, + pool_boundary: Optional[str] = None, + use_tcp_fast_open: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + program: Optional[str] = None, + machine: Optional[str] = None, + terminal: Optional[str] = None, + osuser: Optional[str] = None, + driver_name: Optional[str] = None, + use_sni: Optional[bool] = None, + thick_mode_dsn_passthrough: Optional[bool] = None, + extra_auth_params: Optional[dict] = None, + pool_name: Optional[str] = None, + handle: Optional[int] = None, + ): + """ + All parameters are optional. A brief description of each parameter + follows: + + - ``min``: the minimum number of connections the pool should contain + (default: 1) + + - ``max``: the maximum number of connections the pool should contain + (default: 2) + + - ``increment``: the number of connections that should be added to the + pool whenever the pool needs to grow + (default: 1) + + - ``connectiontype``: the class of the connection that should be + returned during calls to pool.acquire(). It must be + oracledb.Connection or a subclass of oracledb.Connection + (default: None) + + - ``getmode``: how pool.acquire() will behave. 
One of the constants + :data:`oracledb.POOL_GETMODE_WAIT`, + :data:`oracledb.POOL_GETMODE_NOWAIT`, + :data:`oracledb.POOL_GETMODE_FORCEGET`, or + :data:`oracledb.POOL_GETMODE_TIMEDWAIT` + (default: :attr:`oracledb.POOL_GETMODE_WAIT`) + + - ``homogeneous``: a boolean indicating whether the connections in the + pool are homogeneous (same user) or heterogeneous (multiple users) + (default: True) + + - ``timeout``: the length of time (in seconds) that a connection may + remain idle in the pool before it is terminated. If it is 0 then + connections are never terminated + (default: 0) + + - ``wait_timeout``: the length of time (in milliseconds) that a caller + should wait when acquiring a connection from the pool with getmode + set to :data:`oracledb.POOL_GETMODE_TIMEDWAIT` + (default: 0) + + - ``max_lifetime_session``: the length of time (in seconds) that + connections can remain in the pool. If it is 0 then connections may + remain in the pool indefinitely + (default: 0) + + - ``session_callback``: a callable that is invoked when a connection is + returned from the pool for the first time, or when the connection tag + differs from the one requested + (default: None) + + - ``max_sessions_per_shard``: the maximum number of connections that + may be associated with a particular shard + (default: 0) + + - ``soda_metadata_cache``: a boolean indicating whether or not the SODA + metadata cache should be enabled + (default: False) + + - ``ping_interval``: the length of time (in seconds) after which an + unused connection in the pool will be a candidate for pinging when + pool.acquire() is called. If the ping to the database indicates the + connection is not alive a replacement connection will be returned by + pool.acquire(). If ping_interval is a negative value the ping + functionality will be disabled + (default: 60) + + - ``ping_timeout``: the maximum length of time (in milliseconds) to + wait for a connection in the pool to respond to an internal ping to + the database before being discarded and replaced during a call to + acquire() + (default: 5000) + + - ``user``: the name of the database user to connect to + (default: None) + + - ``proxy_user``: the name of the proxy user to connect to. If this + value is not specified, it will be parsed out of user if user is in + the form "user[proxy_user]" + (default: None) + + - ``password``: the password for the database user + (default: None) + + - ``newpassword``: a new password for the database user. The new + password will take effect immediately upon a successful connection to + the database + (default: None) + + - ``wallet_password``: the password to use to decrypt the wallet, if it + is encrypted. This is not the database password. For Oracle + Autonomous Database this is the password created when downloading the + wallet. This value is only used in python-oracledb Thin mode. + (default: None) + + - ``access_token``: a string, or a 2-tuple, or a callable. If it is a + string, it specifies an Entra ID OAuth2 token used for Open + Authorization (OAuth 2.0) token based authentication. If it is a + 2-tuple, it specifies the token and private key strings used for + Oracle Cloud Infrastructure (OCI) Identity and Access Management + (IAM) token based authentication. 
If it is a callable, it returns + either a string or a 2-tuple used for OAuth 2.0 or OCI IAM token + based authentication and is useful when the pool needs to expand and + create new connections but the current authentication token has + expired + (default: None) + + - ``host``: the hostname or IP address of the machine hosting the + database or the database listener + (default: None) + + - ``port``: the port number on which the database listener is listening + (default: 1521) + + - ``protocol``: one of the strings "tcp" or "tcps" indicating whether + to use unencrypted network traffic or encrypted network traffic (TLS) + (default: "tcp") + + - ``https_proxy``: the hostname or IP address of a proxy host to use + for tunneling secure connections + (default: None) + + - ``https_proxy_port``: the port on which to communicate with the proxy + host + (default: 0) + + - ``service_name``: the service name of the database + (default: None) + + - ``instance_name``: the instance name of the database + (default: None) + + - ``sid``: the system identifier (SID) of the database. Note using a + service_name instead is recommended + (default: None) + + - ``server_type``: the type of server connection that should be + established. If specified, it should be one of strings "dedicated", + "shared" or "pooled" + (default: None) + + - ``cclass``: the connection class to use for Database Resident + Connection Pooling (DRCP) + (default: None) + + - ``purity``: the connection purity to use for Database Resident + Connection Pooling (DRCP) + (default: :attr:`oracledb.PURITY_DEFAULT`) + + - ``expire_time``: the number of minutes between the sending of + keepalive probes. If this parameter is set to a value greater than + zero it enables keepalive + (default: 0) + + - ``retry_count``: the number of times that initial connection + establishment should be retried before the connection attempt is + terminated + (default: 0) + + - ``retry_delay``: the number of seconds to wait before retrying to + establish a connection + (default: 1) + + - ``tcp_connect_timeout``: a float indicating the maximum number of + seconds to wait when establishing a connection to the database host + (default: 20.0) + + - ``ssl_server_dn_match``: a boolean indicating whether the server + certificate distinguished name (DN) should be matched in addition to + the regular certificate verification that is performed. Note that if + the ssl_server_cert_dn parameter is not privided, host name matching + is performed instead + (default: True) + + - ``ssl_server_cert_dn``: the distinguished name (DN) which should be + matched with the server. This value is ignored if the + ssl_server_dn_match parameter is not set to the value True. If + specified this value is used for any verfication. Otherwise the + hostname will be used + (default: None) + + - ``wallet_location``: the directory where the wallet can be found. In + python-oracledb Thin mode this must be the directory containing the + PEM-encoded wallet file ewallet.pem. In python-oracledb Thick mode + this must be the directory containing the file cwallet.sso + (default: None) + + - ``events``: a boolean specifying whether events mode should be + enabled. This value is only used in python-oracledb Thick mode and is + needed for continuous query notification and high availability event + notifications + (default: False) + + - ``externalauth``: a boolean indicating whether to use external + authentication + (default: False) + + - ``mode``: the authorization mode to use. 
One of the constants + :data:`oracledb.AUTH_MODE_DEFAULT`, + :data:`oracledb.AUTH_MODE_PRELIM`, :data:`oracledb.AUTH_MODE_SYSASM`, + :data:`oracledb.AUTH_MODE_SYSBKP`, :data:`oracledb.AUTH_MODE_SYSDBA`, + :data:`oracledb.AUTH_MODE_SYSDGD`, :data:`oracledb.AUTH_MODE_SYSKMT`, + :data:`oracledb.AUTH_MODE_SYSOPER`, or + :data:`oracledb.AUTH_MODE_SYSRAC` + (default: :attr:`oracledb.AUTH_MODE_DEFAULT`) + + - ``disable_oob``: a boolean indicating whether out-of-band breaks + should be disabled. This value is only used in python-oracledb Thin + mode. It has no effect on Windows which does not support this + functionality + (default: False) + + - ``stmtcachesize``: the size of the statement cache + (default: :attr:`oracledb.defaults.stmtcachesize + `) + + - ``edition``: edition to use for the connection. This parameter cannot + be used simultaneously with the cclass parameter + (default: None) + + - ``tag``: identifies the type of connection that should be returned + from a pool. This value is only used in python-oracledb Thick mode + (default: None) + + - ``matchanytag``: a boolean specifying whether any tag can be used + when acquiring a connection from the pool. This value is only used in + python-oracledb Thick mode + (default: False) + + - ``config_dir``: a directory in which the optional tnsnames.ora + configuration file is located. This value is only used in python- + oracledb Thin mode. For python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()` + (default: :attr:`oracledb.defaults.config_dir + `) + + - ``appcontext``: application context used by the connection. It should + be a list of 3-tuples (namespace, name, value) and each entry in the + tuple should be a string + (default: None) + + - ``shardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + (default: None) + + - ``supershardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + (default: None) + + - ``debug_jdwp``: a string with the format "host=;port=" + that specifies the host and port of the PL/SQL debugger. This value + is only used in python-oracledb Thin mode. For python-oracledb Thick + mode set the ORA_DEBUG_JDWP environment variable + (default: None) + + - ``connection_id_prefix``: an application specific prefix that is + added to the connection identifier used for tracing + (default: None) + + - ``ssl_context``: an SSLContext object used for connecting to the + database using TLS. This SSL context will be modified to include the + private key or any certificates found in a separately supplied + wallet. This parameter should only be specified if the default + SSLContext object cannot be used + (default: None) + + - ``sdu``: the requested size of the Session Data Unit (SDU), in bytes. + The value tunes internal buffers used for communication to the + database. Bigger values can increase throughput for large queries or + bulk data loads, but at the cost of higher memory use. 
The SDU size + that will actually be used is negotiated down to the lower of this + value and the database network SDU configuration value + (default: 8192) + + - ``pool_boundary``: one of the values "statement" or "transaction" + indicating when pooled DRCP connections can be returned to the pool. + This requires the use of DRCP with Oracle Database 23.4 or higher + (default: None) + + - ``use_tcp_fast_open``: a boolean indicating whether to use TCP fast + open. This is an Oracle Autonomous Database Serverless (ADB-S) + specific property for clients connecting from within OCI Cloud + network. Please refer to the ADB-S documentation for more information + (default: False) + + - ``ssl_version``: one of the values ssl.TLSVersion.TLSv1_2 or + ssl.TLSVersion.TLSv1_3 indicating which TLS version to use + (default: None) + + - ``program``: a string recorded by Oracle Database as the program from + which the connection originates + (default: :attr:`oracledb.defaults.program + `) + + - ``machine``: a string recorded by Oracle Database as the name of the + machine from which the connection originates + (default: :attr:`oracledb.defaults.machine + `) + + - ``terminal``: a string recorded by Oracle Database as the terminal + identifier from which the connection originates + (default: :attr:`oracledb.defaults.terminal + `) + + - ``osuser``: a string recorded by Oracle Database as the operating + system user who originated the connection + (default: :attr:`oracledb.defaults.osuser + `) + + - ``driver_name``: a string recorded by Oracle Database as the name of + the driver which originated the connection + (default: :attr:`oracledb.defaults.driver_name + `) + + - ``use_sni``: a boolean indicating whether to use the TLS SNI + extension to bypass the second TLS neogiation that would otherwise be + required + (default: False) + + - ``thick_mode_dsn_passthrough``: a boolean indicating whether to pass + the connect string to the Oracle Client libraries unchanged without + parsing by the driver. Setting this to False makes python-oracledb + Thick and Thin mode applications behave similarly regarding + connection string parameter handling and locating any optional + tnsnames.ora configuration file + (default: :attr:`oracledb.defaults.thick_mode_dsn_passthrough + `) + + - ``extra_auth_params``: a dictionary containing configuration + parameters necessary for Oracle Database authentication using + plugins, such as the Azure and OCI cloud-native authentication + plugins + (default: None) + + - ``pool_name``: the name of the DRCP pool when using multi-pool DRCP + with Oracle Database 23.4, or higher + (default: None) + + - ``handle``: an integer representing a pointer to a valid service + context handle. This value is only used in python-oracledb Thick + mode. 
It should be used with extreme caution + (default: 0) + """ + pass + + def __repr__(self): + return ( + self.__class__.__qualname__ + "(" + f"min={self.min!r}, " + f"max={self.max!r}, " + f"increment={self.increment!r}, " + f"connectiontype={self.connectiontype!r}, " + f"getmode={self.getmode!r}, " + f"homogeneous={self.homogeneous!r}, " + f"timeout={self.timeout!r}, " + f"wait_timeout={self.wait_timeout!r}, " + f"max_lifetime_session={self.max_lifetime_session!r}, " + f"session_callback={self.session_callback!r}, " + f"max_sessions_per_shard={self.max_sessions_per_shard!r}, " + f"soda_metadata_cache={self.soda_metadata_cache!r}, " + f"ping_interval={self.ping_interval!r}, " + f"ping_timeout={self.ping_timeout!r}, " + f"user={self.user!r}, " + f"proxy_user={self.proxy_user!r}, " + f"host={self.host!r}, " + f"port={self.port!r}, " + f"protocol={self.protocol!r}, " + f"https_proxy={self.https_proxy!r}, " + f"https_proxy_port={self.https_proxy_port!r}, " + f"service_name={self.service_name!r}, " + f"instance_name={self.instance_name!r}, " + f"sid={self.sid!r}, " + f"server_type={self.server_type!r}, " + f"cclass={self.cclass!r}, " + f"purity={self.purity!r}, " + f"expire_time={self.expire_time!r}, " + f"retry_count={self.retry_count!r}, " + f"retry_delay={self.retry_delay!r}, " + f"tcp_connect_timeout={self.tcp_connect_timeout!r}, " + f"ssl_server_dn_match={self.ssl_server_dn_match!r}, " + f"ssl_server_cert_dn={self.ssl_server_cert_dn!r}, " + f"wallet_location={self.wallet_location!r}, " + f"events={self.events!r}, " + f"externalauth={self.externalauth!r}, " + f"mode={self.mode!r}, " + f"disable_oob={self.disable_oob!r}, " + f"stmtcachesize={self.stmtcachesize!r}, " + f"edition={self.edition!r}, " + f"tag={self.tag!r}, " + f"matchanytag={self.matchanytag!r}, " + f"config_dir={self.config_dir!r}, " + f"appcontext={self.appcontext!r}, " + f"shardingkey={self.shardingkey!r}, " + f"supershardingkey={self.supershardingkey!r}, " + f"debug_jdwp={self.debug_jdwp!r}, " + f"connection_id_prefix={self.connection_id_prefix!r}, " + f"ssl_context={self.ssl_context!r}, " + f"sdu={self.sdu!r}, " + f"pool_boundary={self.pool_boundary!r}, " + f"use_tcp_fast_open={self.use_tcp_fast_open!r}, " + f"ssl_version={self.ssl_version!r}, " + f"program={self.program!r}, " + f"machine={self.machine!r}, " + f"terminal={self.terminal!r}, " + f"osuser={self.osuser!r}, " + f"driver_name={self.driver_name!r}, " + f"use_sni={self.use_sni!r}, " + f"thick_mode_dsn_passthrough={self.thick_mode_dsn_passthrough!r}, " + f"extra_auth_params={self.extra_auth_params!r}, " + f"pool_name={self.pool_name!r}" + ")" + ) + + @property + def connectiontype(self) -> Type["oracledb.Connection"]: + """ + The class of the connection that should be returned during calls to + pool.acquire(). It must be oracledb.Connection or a subclass of + oracledb.Connection. + """ + return self._impl.connectiontype + + @property + def getmode(self) -> oracledb.PoolGetMode: + """ + How pool.acquire() will behave. One of the constants + :data:`oracledb.POOL_GETMODE_WAIT`, + :data:`oracledb.POOL_GETMODE_NOWAIT`, + :data:`oracledb.POOL_GETMODE_FORCEGET`, or + :data:`oracledb.POOL_GETMODE_TIMEDWAIT`. + """ + return oracledb.PoolGetMode(self._impl.getmode) + + @property + def homogeneous(self) -> bool: + """ + A boolean indicating whether the connections in the pool are + homogeneous (same user) or heterogeneous (multiple users). 
+ """ + return self._impl.homogeneous + + @property + def increment(self) -> int: + """ + The number of connections that should be added to the pool whenever the + pool needs to grow. + """ + return self._impl.increment + + @property + def max(self) -> int: + """ + The maximum number of connections the pool should contain. + """ + return self._impl.max + + @property + def max_lifetime_session(self) -> int: + """ + The length of time (in seconds) that connections can remain in the + pool. If it is 0 then connections may remain in the pool indefinitely. + """ + return self._impl.max_lifetime_session + + @property + def max_sessions_per_shard(self) -> int: + """ + The maximum number of connections that may be associated with a + particular shard. + """ + return self._impl.max_sessions_per_shard + + @property + def min(self) -> int: + """ + The minimum number of connections the pool should contain. + """ + return self._impl.min + + @property + def ping_interval(self) -> int: + """ + The length of time (in seconds) after which an unused connection in the + pool will be a candidate for pinging when pool.acquire() is called. If + the ping to the database indicates the connection is not alive a + replacement connection will be returned by pool.acquire(). If + ping_interval is a negative value the ping functionality will be + disabled. + """ + return self._impl.ping_interval + + @property + def ping_timeout(self) -> int: + """ + The maximum length of time (in milliseconds) to wait for a connection + in the pool to respond to an internal ping to the database before being + discarded and replaced during a call to acquire(). + """ + return self._impl.ping_timeout + + @property + def session_callback(self) -> Callable: + """ + A callable that is invoked when a connection is returned from the pool + for the first time, or when the connection tag differs from the one + requested. + """ + return self._impl.session_callback + + @property + def soda_metadata_cache(self) -> bool: + """ + A boolean indicating whether or not the SODA metadata cache should be + enabled. + """ + return self._impl.soda_metadata_cache + + @property + def timeout(self) -> int: + """ + The length of time (in seconds) that a connection may remain idle in + the pool before it is terminated. If it is 0 then connections are never + terminated. + """ + return self._impl.timeout + + @property + def wait_timeout(self) -> int: + """ + The length of time (in milliseconds) that a caller should wait when + acquiring a connection from the pool with getmode set to + :data:`oracledb.POOL_GETMODE_TIMEDWAIT`. + """ + return self._impl.wait_timeout + + def copy(self) -> "PoolParams": + """ + Creates a copy of the parameters and returns it. 
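+
+ For example (illustrative), a copy can be adjusted with ``set()`` without
+ changing the original object:
+
+     base_params = oracledb.PoolParams(min=1, max=4, increment=1)
+     larger_params = base_params.copy()
+     larger_params.set(max=16)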
+ """ + params = PoolParams.__new__(PoolParams) + params._impl = self._impl.copy() + return params + + @utils.params_setter + def set( + self, + *, + min: Optional[int] = None, + max: Optional[int] = None, + increment: Optional[int] = None, + connectiontype: Optional[Type["oracledb.Connection"]] = None, + getmode: Optional[oracledb.PoolGetMode] = None, + homogeneous: Optional[bool] = None, + timeout: Optional[int] = None, + wait_timeout: Optional[int] = None, + max_lifetime_session: Optional[int] = None, + session_callback: Optional[Callable] = None, + max_sessions_per_shard: Optional[int] = None, + soda_metadata_cache: Optional[bool] = None, + ping_interval: Optional[int] = None, + ping_timeout: Optional[int] = None, + user: Optional[str] = None, + proxy_user: Optional[str] = None, + password: Optional[str] = None, + newpassword: Optional[str] = None, + wallet_password: Optional[str] = None, + access_token: Optional[Union[str, tuple, Callable]] = None, + host: Optional[str] = None, + port: Optional[int] = None, + protocol: Optional[str] = None, + https_proxy: Optional[str] = None, + https_proxy_port: Optional[int] = None, + service_name: Optional[str] = None, + instance_name: Optional[str] = None, + sid: Optional[str] = None, + server_type: Optional[str] = None, + cclass: Optional[str] = None, + purity: Optional[oracledb.Purity] = None, + expire_time: Optional[int] = None, + retry_count: Optional[int] = None, + retry_delay: Optional[int] = None, + tcp_connect_timeout: Optional[float] = None, + ssl_server_dn_match: Optional[bool] = None, + ssl_server_cert_dn: Optional[str] = None, + wallet_location: Optional[str] = None, + events: Optional[bool] = None, + externalauth: Optional[bool] = None, + mode: Optional[oracledb.AuthMode] = None, + disable_oob: Optional[bool] = None, + stmtcachesize: Optional[int] = None, + edition: Optional[str] = None, + tag: Optional[str] = None, + matchanytag: Optional[bool] = None, + config_dir: Optional[str] = None, + appcontext: Optional[list] = None, + shardingkey: Optional[list] = None, + supershardingkey: Optional[list] = None, + debug_jdwp: Optional[str] = None, + connection_id_prefix: Optional[str] = None, + ssl_context: Optional[Any] = None, + sdu: Optional[int] = None, + pool_boundary: Optional[str] = None, + use_tcp_fast_open: Optional[bool] = None, + ssl_version: Optional[ssl.TLSVersion] = None, + program: Optional[str] = None, + machine: Optional[str] = None, + terminal: Optional[str] = None, + osuser: Optional[str] = None, + driver_name: Optional[str] = None, + use_sni: Optional[bool] = None, + thick_mode_dsn_passthrough: Optional[bool] = None, + extra_auth_params: Optional[dict] = None, + pool_name: Optional[str] = None, + handle: Optional[int] = None, + ): + """ + All parameters are optional. A brief description of each parameter + follows: + + - ``min``: the minimum number of connections the pool should contain + + - ``max``: the maximum number of connections the pool should contain + + - ``increment``: the number of connections that should be added to the + pool whenever the pool needs to grow + + - ``connectiontype``: the class of the connection that should be + returned during calls to pool.acquire(). It must be + oracledb.Connection or a subclass of oracledb.Connection + + - ``getmode``: how pool.acquire() will behave. 
One of the constants + :data:`oracledb.POOL_GETMODE_WAIT`, + :data:`oracledb.POOL_GETMODE_NOWAIT`, + :data:`oracledb.POOL_GETMODE_FORCEGET`, or + :data:`oracledb.POOL_GETMODE_TIMEDWAIT` + + - ``homogeneous``: a boolean indicating whether the connections in the + pool are homogeneous (same user) or heterogeneous (multiple users) + + - ``timeout``: the length of time (in seconds) that a connection may + remain idle in the pool before it is terminated. If it is 0 then + connections are never terminated + + - ``wait_timeout``: the length of time (in milliseconds) that a caller + should wait when acquiring a connection from the pool with getmode + set to :data:`oracledb.POOL_GETMODE_TIMEDWAIT` + + - ``max_lifetime_session``: the length of time (in seconds) that + connections can remain in the pool. If it is 0 then connections may + remain in the pool indefinitely + + - ``session_callback``: a callable that is invoked when a connection is + returned from the pool for the first time, or when the connection tag + differs from the one requested + + - ``max_sessions_per_shard``: the maximum number of connections that + may be associated with a particular shard + + - ``soda_metadata_cache``: a boolean indicating whether or not the SODA + metadata cache should be enabled + + - ``ping_interval``: the length of time (in seconds) after which an + unused connection in the pool will be a candidate for pinging when + pool.acquire() is called. If the ping to the database indicates the + connection is not alive a replacement connection will be returned by + pool.acquire(). If ping_interval is a negative value the ping + functionality will be disabled + + - ``ping_timeout``: the maximum length of time (in milliseconds) to + wait for a connection in the pool to respond to an internal ping to + the database before being discarded and replaced during a call to + acquire() + + - ``user``: the name of the database user to connect to + + - ``proxy_user``: the name of the proxy user to connect to. If this + value is not specified, it will be parsed out of user if user is in + the form "user[proxy_user]" + + - ``password``: the password for the database user + + - ``newpassword``: a new password for the database user. The new + password will take effect immediately upon a successful connection to + the database + + - ``wallet_password``: the password to use to decrypt the wallet, if it + is encrypted. This is not the database password. For Oracle + Autonomous Database this is the password created when downloading the + wallet. This value is only used in python-oracledb Thin mode. + + - ``access_token``: a string, or a 2-tuple, or a callable. If it is a + string, it specifies an Entra ID OAuth2 token used for Open + Authorization (OAuth 2.0) token based authentication. If it is a + 2-tuple, it specifies the token and private key strings used for + Oracle Cloud Infrastructure (OCI) Identity and Access Management + (IAM) token based authentication. 
If it is a callable, it returns + either a string or a 2-tuple used for OAuth 2.0 or OCI IAM token + based authentication and is useful when the pool needs to expand and + create new connections but the current authentication token has + expired + + - ``host``: the hostname or IP address of the machine hosting the + database or the database listener + + - ``port``: the port number on which the database listener is listening + + - ``protocol``: one of the strings "tcp" or "tcps" indicating whether + to use unencrypted network traffic or encrypted network traffic (TLS) + + - ``https_proxy``: the hostname or IP address of a proxy host to use + for tunneling secure connections + + - ``https_proxy_port``: the port on which to communicate with the proxy + host + + - ``service_name``: the service name of the database + + - ``instance_name``: the instance name of the database + + - ``sid``: the system identifier (SID) of the database. Note using a + service_name instead is recommended + + - ``server_type``: the type of server connection that should be + established. If specified, it should be one of strings "dedicated", + "shared" or "pooled" + + - ``cclass``: the connection class to use for Database Resident + Connection Pooling (DRCP) + + - ``purity``: the connection purity to use for Database Resident + Connection Pooling (DRCP) + + - ``expire_time``: the number of minutes between the sending of + keepalive probes. If this parameter is set to a value greater than + zero it enables keepalive + + - ``retry_count``: the number of times that initial connection + establishment should be retried before the connection attempt is + terminated + + - ``retry_delay``: the number of seconds to wait before retrying to + establish a connection + + - ``tcp_connect_timeout``: a float indicating the maximum number of + seconds to wait when establishing a connection to the database host + + - ``ssl_server_dn_match``: a boolean indicating whether the server + certificate distinguished name (DN) should be matched in addition to + the regular certificate verification that is performed. Note that if + the ssl_server_cert_dn parameter is not privided, host name matching + is performed instead + + - ``ssl_server_cert_dn``: the distinguished name (DN) which should be + matched with the server. This value is ignored if the + ssl_server_dn_match parameter is not set to the value True. If + specified this value is used for any verfication. Otherwise the + hostname will be used + + - ``wallet_location``: the directory where the wallet can be found. In + python-oracledb Thin mode this must be the directory containing the + PEM-encoded wallet file ewallet.pem. In python-oracledb Thick mode + this must be the directory containing the file cwallet.sso + + - ``events``: a boolean specifying whether events mode should be + enabled. This value is only used in python-oracledb Thick mode and is + needed for continuous query notification and high availability event + notifications + + - ``externalauth``: a boolean indicating whether to use external + authentication + + - ``mode``: the authorization mode to use. 
One of the constants + :data:`oracledb.AUTH_MODE_DEFAULT`, + :data:`oracledb.AUTH_MODE_PRELIM`, :data:`oracledb.AUTH_MODE_SYSASM`, + :data:`oracledb.AUTH_MODE_SYSBKP`, :data:`oracledb.AUTH_MODE_SYSDBA`, + :data:`oracledb.AUTH_MODE_SYSDGD`, :data:`oracledb.AUTH_MODE_SYSKMT`, + :data:`oracledb.AUTH_MODE_SYSOPER`, or + :data:`oracledb.AUTH_MODE_SYSRAC` + + - ``disable_oob``: a boolean indicating whether out-of-band breaks + should be disabled. This value is only used in python-oracledb Thin + mode. It has no effect on Windows which does not support this + functionality + + - ``stmtcachesize``: the size of the statement cache + + - ``edition``: edition to use for the connection. This parameter cannot + be used simultaneously with the cclass parameter + + - ``tag``: identifies the type of connection that should be returned + from a pool. This value is only used in python-oracledb Thick mode + + - ``matchanytag``: a boolean specifying whether any tag can be used + when acquiring a connection from the pool. This value is only used in + python-oracledb Thick mode + + - ``config_dir``: a directory in which the optional tnsnames.ora + configuration file is located. This value is only used in python- + oracledb Thin mode. For python-oracledb Thick mode, it is used if + :attr:`oracledb.defaults.thick_mode_dsn_passthrough + ` is *False*. Otherwise in Thick + mode use the ``config_dir`` parameter of + :meth:`oracledb.init_oracle_client()` + + - ``appcontext``: application context used by the connection. It should + be a list of 3-tuples (namespace, name, value) and each entry in the + tuple should be a string + + - ``shardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + + - ``supershardingkey``: a list of strings, numbers, bytes or dates that + identify the database shard to connect to. This value is only used in + python-oracledb Thick mode + + - ``debug_jdwp``: a string with the format "host=;port=" + that specifies the host and port of the PL/SQL debugger. This value + is only used in python-oracledb Thin mode. For python-oracledb Thick + mode set the ORA_DEBUG_JDWP environment variable + + - ``connection_id_prefix``: an application specific prefix that is + added to the connection identifier used for tracing + + - ``ssl_context``: an SSLContext object used for connecting to the + database using TLS. This SSL context will be modified to include the + private key or any certificates found in a separately supplied + wallet. This parameter should only be specified if the default + SSLContext object cannot be used + + - ``sdu``: the requested size of the Session Data Unit (SDU), in bytes. + The value tunes internal buffers used for communication to the + database. Bigger values can increase throughput for large queries or + bulk data loads, but at the cost of higher memory use. The SDU size + that will actually be used is negotiated down to the lower of this + value and the database network SDU configuration value + + - ``pool_boundary``: one of the values "statement" or "transaction" + indicating when pooled DRCP connections can be returned to the pool. + This requires the use of DRCP with Oracle Database 23.4 or higher + + - ``use_tcp_fast_open``: a boolean indicating whether to use TCP fast + open. This is an Oracle Autonomous Database Serverless (ADB-S) + specific property for clients connecting from within OCI Cloud + network. 
Please refer to the ADB-S documentation for more information + + - ``ssl_version``: one of the values ssl.TLSVersion.TLSv1_2 or + ssl.TLSVersion.TLSv1_3 indicating which TLS version to use + + - ``program``: a string recorded by Oracle Database as the program from + which the connection originates + + - ``machine``: a string recorded by Oracle Database as the name of the + machine from which the connection originates + + - ``terminal``: a string recorded by Oracle Database as the terminal + identifier from which the connection originates + + - ``osuser``: a string recorded by Oracle Database as the operating + system user who originated the connection + + - ``driver_name``: a string recorded by Oracle Database as the name of + the driver which originated the connection + + - ``use_sni``: a boolean indicating whether to use the TLS SNI + extension to bypass the second TLS neogiation that would otherwise be + required + + - ``thick_mode_dsn_passthrough``: a boolean indicating whether to pass + the connect string to the Oracle Client libraries unchanged without + parsing by the driver. Setting this to False makes python-oracledb + Thick and Thin mode applications behave similarly regarding + connection string parameter handling and locating any optional + tnsnames.ora configuration file + + - ``extra_auth_params``: a dictionary containing configuration + parameters necessary for Oracle Database authentication using + plugins, such as the Azure and OCI cloud-native authentication + plugins + + - ``pool_name``: the name of the DRCP pool when using multi-pool DRCP + with Oracle Database 23.4, or higher + + - ``handle``: an integer representing a pointer to a valid service + context handle. This value is only used in python-oracledb Thick + mode. It should be used with extreme caution + """ + pass diff --git a/.venv/lib/python3.9/site-packages/oracledb/py.typed b/.venv/lib/python3.9/site-packages/oracledb/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/oracledb/soda.py b/.venv/lib/python3.9/site-packages/oracledb/soda.py new file mode 100644 index 0000000..39a6fa7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/soda.py @@ -0,0 +1,746 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# soda.py +# +# Contains the classes for managing Simple Oracle Document Access (SODA): +# SodaDatabase, SodaCollection, SodaDocument, SodaDocCursor and SodaOperation. +# ----------------------------------------------------------------------------- + +from typing import Any, Optional, Union +from typing_extensions import Self +import json + +from .base import BaseMetaClass +from . import errors + + +class SodaDatabase(metaclass=BaseMetaClass): + def __repr__(self): + cls_name = self.__class__._public_name + return f"<{cls_name} on {self._conn!r}>" + + @classmethod + def _from_impl(cls, conn, impl): + db = cls.__new__(cls) + db._conn = conn + db._impl = impl + return db + + def _create_doc_impl( + self, content: Any, key: str = None, media_type: str = None + ) -> "SodaDocument": + """ + Internal method used for creating a document implementation object with + the given content, key and media type. + """ + if isinstance(content, str): + content_bytes = content.encode() + elif isinstance(content, bytes): + content_bytes = content + elif self._impl.supports_json: + return self._impl.create_json_document(content, key) + else: + content_bytes = json.dumps(content).encode() + return self._impl.create_document(content_bytes, key, media_type) + + def createCollection( + self, + name: str, + metadata: Optional[Union[str, dict]] = None, + mapMode: bool = False, + ) -> "SodaCollection": + """ + Creates a SODA collection with the given name and returns a new SODA + collection object. If you try to create a collection, and a collection + with the same name and metadata already exists, then that existing + collection is opened without error. + + If ``metadata`` is specified, it is expected to be a string containing + valid JSON or a dictionary that will be transformed into a JSON string. + This JSON permits you to specify the configuration of the collection + including storage options; specifying the presence or absence of + columns for creation timestamp, last modified timestamp and version; + whether the collection can store only JSON documents; and methods of + key and version generation. The default metadata creates a collection + that only supports JSON documents and uses system generated keys. + + If the ``mapMode`` parameter is set to *True*, the new collection is + mapped to an existing table instead of creating a table. If a + collection is created in this way, dropping the collection will not + drop the existing table either. + """ + if metadata is not None and not isinstance(metadata, str): + metadata = json.dumps(metadata) + collection_impl = self._impl.create_collection(name, metadata, mapMode) + return SodaCollection._from_impl(self, collection_impl) + + def createDocument( + self, + content: Any, + key: Optional[str] = None, + mediaType: str = "application/json", + ) -> "SodaDocument": + """ + Creates a SODA document usable for SODA write operations. You only need + to use this method if your collection requires client-assigned keys or + has non-JSON content; otherwise, you can pass your content directly to + SODA write operations. SodaDocument attributes + :attr:`~SodaDoc.createdOn`, :attr:`~SodaDoc.lastModified`, and + :attr:`~SodaDoc.version` will be *None*. + + The ``content`` parameter can be a dictionary or list which will be + transformed into a JSON string and then UTF-8 encoded. 
It can also be a + string which will be UTF-8 encoded or it can be a bytes object which + will be stored unchanged. If a bytes object is provided and the content + is expected to be JSON, note that SODA only supports UTF-8, UTF-16LE, + and UTF-16BE encodings. + + The ``key`` parameter should only be supplied if the collection in + which the document is to be placed requires client-assigned keys. + + The ``mediaType`` parameter should only be supplied if the collection + in which the document is to be placed supports non-JSON documents and + the content for this document is non-JSON. Using a standard MIME type + for this value is recommended but any string will be accepted. + """ + doc_impl = self._create_doc_impl(content, key, mediaType) + return SodaDocument._from_impl(doc_impl) + + def getCollectionNames( + self, startName: Optional[str] = None, limit: int = 0 + ) -> list[str]: + """ + Returns a list of the names of collections in the database that match + the criteria, in alphabetical order. + + If the ``startName`` parameter is specified, the list of names returned + will start with this value and also contain any names that fall after + this value in alphabetical order. + + If the ``limit`` parameter is specified and is non-zero, the number of + collection names returned will be limited to this value. + """ + return self._impl.get_collection_names(startName, limit) + + def openCollection(self, name: str) -> "SodaCollection": + """ + Opens an existing collection with the given name and returns a new SODA + collection object. If a collection with that name does not exist, + *None* is returned. + """ + collection_impl = self._impl.open_collection(name) + if collection_impl is not None: + return SodaCollection._from_impl(self, collection_impl) + + +class SodaCollection(metaclass=BaseMetaClass): + @classmethod + def _from_impl(cls, db, impl): + coll = cls.__new__(cls) + coll._db = db + coll._impl = impl + return coll + + def _process_doc_arg(self, arg): + if isinstance(arg, SodaDocument): + return arg._impl + return self._db._create_doc_impl(arg) + + def createIndex(self, spec: Union[dict, str]) -> None: + """ + Creates an index on a SODA collection. + + The ``spec`` parameter is expected to be a dictionary or a JSON-encoded + string. + + Note that a commit should be performed before attempting to create an + index. + """ + if isinstance(spec, dict): + spec = json.dumps(spec) + elif not isinstance(spec, str): + raise TypeError("expecting a dictionary or string") + self._impl.create_index(spec) + + def drop(self) -> bool: + """ + Drops the collection from the database, if it exists. Note that if the + collection was created with ``mapMode`` set to *True*, the underlying + table will not be dropped. + + A boolean value is returned indicating if the collection was actually + dropped. + """ + return self._impl.drop() + + def dropIndex(self, name: str, force: bool = False) -> bool: + """ + Drops the index with the specified name, if it exists. + + The force parameter, if set to *True*, can be used to force the + dropping of an index that the underlying Oracle Database domain index + does not normally permit. This is only applicable to spatial and JSON + search indexes. + + A boolean value is returned indicating if the index was actually + dropped. + """ + return self._impl.drop_index(name, force) + + def find(self) -> "SodaOperation": + """ + Begins an operation that will act upon documents in the collection. 
It + creates and returns a SodaOperation object which is used to specify the + criteria and the operation that will be performed on the documents that + match that criteria. + """ + return SodaOperation(self) + + def getDataGuide(self) -> "SodaDocument": + """ + Returns a SODA document object containing property names, data types, + and lengths inferred from the JSON documents in the collection. It can + be useful for exploring the schema of a collection. Note that this + method is only supported for JSON-only collections where a JSON search + index has been created with the ‘dataguide’ option enabled. If there + are no documents in the collection, *None* is returned. + """ + doc_impl = self._impl.get_data_guide() + if doc_impl is not None: + return SodaDocument._from_impl(doc_impl) + + def insertMany(self, docs: list) -> None: + """ + Inserts a list of documents into the collection at one time. Each of + the input documents can be a dictionary or list or an existing SODA + document object. + + This method requires Oracle Client 18.5 (or later) and is available + only as a preview. + """ + doc_impls = [self._process_doc_arg(d) for d in docs] + self._impl.insert_many(doc_impls, hint=None, return_docs=False) + + def insertManyAndGet( + self, docs: list, hint: Optional[str] = None + ) -> list["SodaDocument"]: + """ + Similar to :meth:`SodaCollection.insertMany()`, this method inserts a + list of documents into the collection at one time. The only difference + is that it returns a list of SODA Document objects. Note that for + performance reasons the returned documents do not contain the content. + + The ``hint`` parameter, if specified, supplies a hint to the database + when processing the SODA operation. This is expected to be a string in + the same format as SQL hints but without any comment characters, for + example hint="MONITOR". While you could use this to pass any SQL hint, + the hints MONITOR (turn on monitoring) and NO_MONITOR (turn off + monitoring) are the most useful. Use of the ``hint`` parameter requires + Oracle Client 21.3 or later (or Oracle Client 19 from 19.11). + + This method requires Oracle Client 18.5 (or later). + """ + doc_impls = [self._process_doc_arg(d) for d in docs] + if hint is not None and not isinstance(hint, str): + raise TypeError("expecting a string") + return_doc_impls = self._impl.insert_many( + doc_impls, hint, return_docs=True + ) + return [SodaDocument._from_impl(i) for i in return_doc_impls] + + def insertOne(self, doc: Any) -> None: + """ + Inserts a given document into the collection. The input document can be + a dictionary or list or an existing SODA document object. + """ + doc_impl = self._process_doc_arg(doc) + self._impl.insert_one(doc_impl, hint=None, return_doc=False) + + def insertOneAndGet( + self, doc: Any, hint: Optional[str] = None + ) -> "SodaDocument": + """ + Similar to :meth:`~SodaCollection.insertOne()`, this method inserts a + given document into the collection. The only difference is that it + returns a SODA Document object. Note that for performance reasons the + returned document does not contain the content. + + The ``hint`` parameter, if specified, supplies a hint to the database + when processing the SODA operation. This is expected to be a string in + the same format as SQL hints but without any comment characters, for + example hint="MONITOR". While you could use this to pass any SQL hint, + the hints MONITOR (turn on monitoring) and NO_MONITOR (turn off + monitoring) are the most useful. 
Use of the ``hint`` parameter requires + Oracle Client 21.3 or later (or Oracle Client 19 from 19.11). + """ + doc_impl = self._process_doc_arg(doc) + if hint is not None and not isinstance(hint, str): + raise TypeError("expecting a string") + return_doc_impl = self._impl.insert_one( + doc_impl, hint, return_doc=True + ) + return SodaDocument._from_impl(return_doc_impl) + + def listIndexes(self) -> list: + """ + Returns a list of specifications for the indexes found on the + collection. + + This method requires Oracle Client 21.3 or later (or Oracle Client 19 + from 19.13). + """ + return [json.loads(s) for s in self._impl.list_indexes()] + + @property + def metadata(self) -> dict: + """ + This read-only attribute returns a dictionary containing the metadata + that was used to create the collection. + """ + return json.loads(self._impl.get_metadata()) + + @property + def name(self) -> str: + """ + This read-only attribute returns the name of the collection. + """ + return self._impl.name + + def save(self, doc: Any) -> None: + """ + Saves a document into the collection. This method is equivalent to + :meth:`~SodaCollection.insertOne()` except that if client-assigned keys + are used, and the document with the specified key already exists in the + collection, it will be replaced with the input document. + + This method requires Oracle Client 19.9 (or later) in addition to the + usual SODA requirements. + """ + doc_impl = self._process_doc_arg(doc) + self._impl.save(doc_impl, hint=None, return_doc=False) + + def saveAndGet( + self, doc: Any, hint: Optional[str] = None + ) -> "SodaDocument": + """ + Saves a document into the collection. This method is equivalent to + :meth:`~SodaCollection.insertOneAndGet()` except that if + client-assigned keys are used, and the document with the specified key + already exists in the collection, it will be replaced with the input + document. + + The ``hint`` parameter, if specified, supplies a hint to the database + when processing the SODA operation. This is expected to be a string in + the same format as SQL hints but without any comment characters, for + example hint="MONITOR". While you could use this to pass any SQL hint, + the hints MONITOR (turn on monitoring) and NO_MONITOR (turn off + monitoring) are the most useful. Use of the ``hint`` parameter requires + Oracle Client 21.3 or later (or Oracle Client 19 from 19.11). + + This method requires Oracle Client 19.9 (or later) in addition to the + usual SODA requirements. + """ + doc_impl = self._process_doc_arg(doc) + if hint is not None and not isinstance(hint, str): + raise TypeError("expecting a string") + return_doc_impl = self._impl.save(doc_impl, hint, return_doc=True) + return SodaDocument._from_impl(return_doc_impl) + + def truncate(self) -> None: + """ + Removes all of the documents in the collection, similarly to what is + done for rows in a table by the TRUNCATE TABLE statement. + """ + self._impl.truncate() + + +class SodaDocument(metaclass=BaseMetaClass): + @classmethod + def _from_impl(cls, impl): + doc = cls.__new__(cls) + doc._impl = impl + return doc + + @property + def createdOn(self) -> str: + """ + This read-only attribute returns the creation time of the document in + ISO 8601 format. Documents created by + :meth:`SodaDatabase.createDocument()` or fetched from collections where + this attribute is not stored will return *None*. + """ + return self._impl.get_created_on() + + def getContent(self) -> Union[dict, list]: + """ + Returns the content of the document as a dictionary or list. 
This + method assumes that the content is application/json and will raise an + exception if this is not the case. If there is no content, however, + *None* will be returned. + """ + content, encoding = self._impl.get_content() + if isinstance(content, bytes) and self.mediaType == "application/json": + return json.loads(content.decode(encoding)) + return content + + def getContentAsBytes(self) -> bytes: + """ + Returns the content of the document as a bytes object. If there is no + content, however, *None* will be returned. + """ + content, encoding = self._impl.get_content() + if isinstance(content, bytes): + return content + elif content is not None: + return str(content).encode() + + def getContentAsString(self) -> str: + """ + Returns the content of the document as a string. If the document + encoding is not known, UTF-8 will be used. If there is no content, + however, *None* will be returned. + """ + content, encoding = self._impl.get_content() + if isinstance(content, bytes): + return content.decode(encoding) + elif content is not None: + return str(content) + + @property + def key(self) -> str: + """ + This read-only attribute returns the unique key assigned to this + document. Documents created by :meth:`SodaDatabase.createDocument()` + may not have a value assigned to them and return *None*. + """ + return self._impl.get_key() + + @property + def lastModified(self) -> str: + """ + This read-only attribute returns the last modified time of the document + in ISO 8601 format. Documents created by + :meth:`SodaDatabase.createDocument()` or fetched from collections where + this attribute is not stored will return *None*. + """ + return self._impl.get_last_modified() + + @property + def mediaType(self) -> str: + """ + This read-only attribute returns the media type assigned to the + document. By convention this is expected to be a MIME type but no + checks are performed on this value. If a value is not specified when + calling :meth:`SodaDatabase.createDocument()` or the document is + fetched from a collection where this component is not stored, the + string “application/json” is returned. + """ + return self._impl.get_media_type() + + @property + def version(self) -> str: + """ + This read-only attribute returns the version assigned to this document. + Documents created by :meth:`SodaDatabase.createDocument()` or fetched + from collections where this attribute is not stored will return *None*. + """ + return self._impl.get_version() + + +class SodaDocCursor(metaclass=BaseMetaClass): + def __iter__(self): + return self + + def __next__(self): + if self._impl is None: + errors._raise_err(errors.ERR_CURSOR_NOT_OPEN) + doc_impl = self._impl.get_next_doc() + if doc_impl is not None: + return SodaDocument._from_impl(doc_impl) + raise StopIteration + + @classmethod + def _from_impl(cls, impl): + cursor = cls.__new__(cls) + cursor._impl = impl + return cursor + + def close(self) -> None: + """ + Closes the cursor now, rather than whenever __del__ is called. The + cursor will be unusable from this point forward; an Error exception + will be raised if any operation is attempted with the cursor. 
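+
+        A minimal usage sketch (the ``collection`` variable is illustrative
+        and is assumed to have come from
+        :meth:`SodaDatabase.openCollection()`)::
+
+            cursor = collection.find().getCursor()
+            try:
+                for doc in cursor:
+                    print(doc.key)
+            finally:
+                cursor.close()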
+ """ + if self._impl is None: + errors._raise_err(errors.ERR_CURSOR_NOT_OPEN) + self._impl.close() + self._impl = None + + +class SodaOperation(metaclass=BaseMetaClass): + def __init__(self, collection: SodaCollection) -> None: + self._collection = collection + self._key = None + self._keys = None + self._version = None + self._filter = None + self._hint = None + self._skip = None + self._limit = None + self._fetch_array_size = None + self._lock = False + + def count(self) -> int: + """ + Returns a count of the number of documents in the collection that match + the criteria. If :meth:`~SodaOperation.skip()` or + :meth:`~SodaOperation.limit()` were called on this object, an + exception is raised. + """ + return self._collection._impl.get_count(self) + + def fetchArraySize(self, value: int) -> Self: + """ + This is a tuning method to specify the number of documents that are + internally fetched in batches by calls to + :meth:`~SodaOperation.getCursor()` and + :meth:`~SodaOperation.getDocuments()`. It does not affect how many + documents are returned to the application. + + If ``fetchArraySize()`` is not used, or the ``value`` parameter is *0*, + the array size will default to *100*. + + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + + This method is only available when using Oracle Client 19.5, or later. + """ + if not isinstance(value, int) or value < 0: + raise TypeError("expecting integer >= 0") + if value == 0: + self._fetch_array_size = None + else: + self._fetch_array_size = value + return self + + def filter(self, value: Union[dict, str]) -> Self: + """ + Sets a filter specification for complex document queries and ordering + of JSON documents. Filter specifications must be provided as a + dictionary or JSON-encoded string and can include comparisons, regular + expressions, logical and spatial operators, among others. + + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + """ + if isinstance(value, dict): + self._filter = json.dumps(value) + elif isinstance(value, str): + self._filter = value + else: + raise TypeError("expecting string or dictionary") + return self + + def getCursor(self) -> "SodaDocCursor": + """ + Returns a SodaDocCursor object that can be used to iterate over the + documents that match the criteria. + """ + impl = self._collection._impl.get_cursor(self) + return SodaDocCursor._from_impl(impl) + + def getDocuments(self) -> list["SodaDocument"]: + """ + Returns a list of SodaDocument objects that match the criteria. + """ + return [d for d in self.getCursor()] + + def getOne(self) -> Union["SodaDocument", None]: + """ + Returns a single SodaDocument object that matches the criteria. Note + that if multiple documents match the criteria only the first one is + returned. + """ + doc_impl = self._collection._impl.get_one(self) + if doc_impl is not None: + return SodaDocument._from_impl(doc_impl) + + def hint(self, value: str) -> Self: + """ + Specifies a hint that will be provided to the SODA operation when it is + performed. This is expected to be a string in the same format as SQL + hints but without any comment characters. While you could use this to + pass any SQL hint, the hints MONITOR (turn on monitoring) and + NO_MONITOR (turn off monitoring) are the most useful. Use of this + method requires Oracle Client 21.3 or later (or Oracle Client 19 from + 19.11). 
+ + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + """ + if not isinstance(value, str): + raise TypeError("expecting a string") + self._hint = value + return self + + def lock(self) -> Self: + """ + Specifies whether the documents fetched from the collection should be + locked (equivalent to SQL "select for update"). Use of this method + requires Oracle Client 21.3 or later (or Oracle Client 19 from 19.11). + + The next commit or rollback on the connection made after the operation + is performed will "unlock" the documents. Ensure that the connection is + not in autocommit mode or the documents will be unlocked immediately + after the operation is complete. + + This method should only be used with read operations (other than + :func:`~SodaOperation.count()`) and should not be used in conjunction + with non-terminal methods :meth:`~SodaOperation.skip()` and + :meth:`~SodaOperation.limit()`. + + If this method is specified in conjunction with a write operation, this + method is ignored. + + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + """ + self._lock = True + return self + + def key(self, value: str) -> Self: + """ + Specifies that the document with the specified key should be returned. + This causes any previous calls made to this method and + :meth:`~SodaOperation.keys()` to be ignored. + + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + """ + if not isinstance(value, str): + raise TypeError("expecting string") + self._key = value + self._keys = None + return self + + def keys(self, value: list) -> Self: + """ + Specifies that documents that match the keys found in the supplied + sequence should be returned. This causes any previous calls made to + this method and :meth:`~SodaOperation.key()` to be ignored. + + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + """ + value_as_list = list(value) + for element in value_as_list: + if not isinstance(element, str): + raise TypeError("expecting sequence of strings") + self._keys = value_as_list + self._key = None + return self + + def limit(self, value: int) -> Self: + """ + Specifies that only the specified number of documents should be + returned. This method is only usable for read operations such as + :meth:`~SodaOperation.getCursor()` and + :meth:`~SodaOperation.getDocuments()`. For write operations, any value + set using this method is ignored. + + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + """ + if not isinstance(value, int) or value <= 0: + raise TypeError("expecting positive integer") + self._limit = value + return self + + def remove(self) -> int: + """ + Removes all of the documents in the collection that match the criteria. + The number of documents that have been removed is returned. + """ + return self._collection._impl.remove(self) + + def replaceOne(self, doc: Any) -> bool: + """ + Replaces a single document in the collection with the specified + document. The input document can be a dictionary or list or an existing + SODA document object. A boolean indicating if a document was replaced + or not is returned. + + Currently, the method :meth:`~SodaOperation.key()` must be called + before this method can be called. 
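+
+        A minimal sketch (the ``collection`` variable and the key value are
+        illustrative only)::
+
+            replaced = collection.find().key("myKey").replaceOne(
+                {"name": "Updated content"}
+            )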
+ """ + doc_impl = self._collection._process_doc_arg(doc) + return self._collection._impl.replace_one( + self, doc_impl, return_doc=False + ) + + def replaceOneAndGet(self, doc: Any) -> "SodaDocument": + """ + Similar to :meth:`~SodaOperation.replaceOne()`, this method replaces a + single document in the collection with the specified document. The only + difference is that it returns a SodaDocument object. Note that for + performance reasons the returned document does not contain the content. + """ + doc_impl = self._collection._process_doc_arg(doc) + return_doc_impl = self._collection._impl.replace_one( + self, doc_impl, return_doc=True + ) + return SodaDocument._from_impl(return_doc_impl) + + def skip(self, value: int) -> Self: + """ + Specifies the number of documents that match the other criteria that + will be skipped. This method is only usable for read operations such as + :meth:`~SodaOperation.getOne()`, :meth:`~SodaOperation.getCursor()`, + and :meth:`~SodaOperation.getDocuments()`. For write operations, any + value set using this method is ignored. + + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + """ + if not isinstance(value, int) or value < 0: + raise TypeError("expecting integer >= 0") + self._skip = value + return self + + def version(self, value: str) -> Self: + """ + Specifies that documents with the specified version should be returned. + Typically this is used with :meth:`~SodaOperation.key()` to implement + optimistic locking, so that the write operation called later does not + affect a document that someone else has modified. + + As a convenience, the SodaOperation object is returned so that further + criteria can be specified by chaining methods together. + """ + if not isinstance(value, str): + raise TypeError("expecting string") + self._version = value + return self diff --git a/.venv/lib/python3.9/site-packages/oracledb/sparse_vector.py b/.venv/lib/python3.9/site-packages/oracledb/sparse_vector.py new file mode 100644 index 0000000..bb6b35a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/sparse_vector.py @@ -0,0 +1,114 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2024, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# sparse_vector.py +# +# Contains the SparseVector class which stores information about a sparse +# vector. 
Sparse vectors are available in Oracle Database 23.6 and higher. +# ----------------------------------------------------------------------------- + +import array +from typing import Union + +from .base import BaseMetaClass +from .base_impl import get_array_type_code_uint32, SparseVectorImpl + +ARRAY_TYPE_CODE_UINT32 = get_array_type_code_uint32() + + +class SparseVector(metaclass=BaseMetaClass): + """ + Provides information about sparse vectors. + """ + + def __init__( + self, + num_dimensions: int, + indices: Union[list, array.array], + values: Union[list, array.array], + ): + """ + Creates and returns a :ref:`SparseVector object `. + + The ``num_dimensions`` parameter is the number of dimensions contained + in the vector. + + The ``indices`` parameter is the indices (zero-based) of non-zero + values in the vector. + + The ``values`` parameter is the non-zero values stored in the vector. + """ + if ( + not isinstance(indices, array.array) + or indices.typecode != ARRAY_TYPE_CODE_UINT32 + ): + indices = array.array(ARRAY_TYPE_CODE_UINT32, indices) + if not isinstance(values, array.array): + values = array.array("d", values) + if len(indices) != len(values): + raise TypeError("indices and values must be of the same length!") + self._impl = SparseVectorImpl.from_values( + num_dimensions, indices, values + ) + + def __repr__(self): + cls_name = self.__class__._public_name + return ( + f"{cls_name}({self.num_dimensions}, " + f"{self.indices}, {self.values})" + ) + + def __str__(self): + return ( + f"[{self.num_dimensions}, {list(self.indices)}, " + f"{list(self.values)}]" + ) + + @classmethod + def _from_impl(cls, impl): + vector = cls.__new__(cls) + vector._impl = impl + return vector + + @property + def indices(self) -> array.array: + """ + Returns the indices (zero-based) of non-zero values in the vector. + """ + return self._impl.indices + + @property + def num_dimensions(self) -> int: + """ + Returns the number of dimensions contained in the vector. + """ + return self._impl.num_dimensions + + @property + def values(self) -> array.array: + """ + Returns the non-zero values stored in the vector. + """ + return self._impl.values diff --git a/.venv/lib/python3.9/site-packages/oracledb/subscr.py b/.venv/lib/python3.9/site-packages/oracledb/subscr.py new file mode 100644 index 0000000..6e0c9d8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/subscr.py @@ -0,0 +1,378 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# subscr.py +# +# Contains the Subscription class and Message classes used for managing +# subscriptions to database events and the messages that are sent when those +# events are detected. +# ----------------------------------------------------------------------------- + +from typing import Callable, Optional, Union +from .base import BaseMetaClass +from . import connection + + +class Subscription(metaclass=BaseMetaClass): + def __repr__(self): + return f"" + + @classmethod + def _from_impl(cls, impl): + subscr = cls.__new__(cls) + subscr._impl = impl + return subscr + + @property + def callback(self) -> Callable: + """ + This read-only attribute returns the callback that was registered when + the subscription was created. + """ + return self._impl.callback + + @property + def connection(self) -> "connection.Connection": + """ + This read-only attribute returns the connection that was used to + register the subscription when it was created. + """ + return self._impl.connection + + @property + def id(self) -> int: + """ + This read-only attribute returns the value of REGID found in the + database view USER_CHANGE_NOTIFICATION_REGS or the value of REG_ID + found in the database view USER_SUBSCR_REGISTRATIONS. For AQ + subscriptions, this value is *0*. + """ + return self._impl.id + + @property + def ip_address(self) -> str: + """ + This read-only attribute returns the IP address used for callback + notifications from the database server. If not set during construction, + this value is *None*. + """ + return self._impl.ip_address + + @property + def ipAddress(self) -> str: + """ + Deprecated. Use property ip_address instead. + """ + return self.ip_address + + @property + def name(self) -> str: + """ + This read-only attribute returns the name used to register the + subscription when it was created. + """ + return self._impl.name + + @property + def namespace(self) -> int: + """ + This read-only attribute returns the namespace used to register the + subscription when it was created. + """ + return self._impl.namespace + + @property + def operations(self) -> int: + """ + This read-only attribute returns the operations that will send + notifications for each table or query that is registered using this + subscription. + """ + return self._impl.operations + + @property + def port(self) -> int: + """ + This read-only attribute returns the port used for callback + notifications from the database server. If not set during + construction, this value is *0*. + """ + return self._impl.port + + @property + def protocol(self) -> int: + """ + This read-only attribute returns the protocol used to register the + subscription when it was created. + """ + return self._impl.protocol + + @property + def qos(self) -> int: + """ + This read-only attribute returns the quality of service flags used to + register the subscription when it was created. + """ + return self._impl.qos + + def registerquery( + self, statement: str, args: Optional[Union[list, dict]] = None + ) -> int: + """ + Registers the query for subsequent notification when tables referenced + by the query are changed. This behaves similarly to + :meth:`Cursor.execute()` but only queries are permitted and the + ``args`` parameter, if specified, must be a sequence or dictionary. 
If + the ``qos`` parameter included the flag + :data:`oracledb.SUBSCR_QOS_QUERY` when the subscription was created, + then the ID for the registered query is returned; otherwise, *None* is + returned. + """ + if args is not None and not isinstance(args, (list, dict)): + raise TypeError("expecting args to be a dictionary or list") + return self._impl.register_query(statement, args) + + @property + def timeout(self) -> int: + """ + This read-only attribute returns the timeout (in seconds) that was + specified when the subscription was created. A value of *0* indicates + that there is no timeout. + """ + return self._impl.timeout + + +class Message(metaclass=BaseMetaClass): + def __init__(self, subscription: Subscription) -> None: + self._subscription = subscription + self._consumer_name = None + self._dbname = None + self._queries = [] + self._queue_name = None + self._registered = False + self._tables = [] + self._txid = None + self._type = 0 + self._msgid = None + + @property + def consumer_name(self) -> Union[str, None]: + """ + This read-only attribute returns the name of the consumer which + generated the notification. It will be populated if the + subscription was created with the namespace + :data:`oracledb.SUBSCR_NAMESPACE_AQ` and the queue is a multiple + consumer queue. + """ + return self._consumer_name + + @property + def consumerName(self) -> Union[str, None]: + """ + Deprecated. Use property consumer_name instead. + """ + return self.consumer_name + + @property + def dbname(self) -> Union[str, None]: + """ + This read-only attribute returns the name of the database that + generated the notification. + """ + return self._dbname + + @property + def msgid(self) -> Union[bytes, None]: + """ + This read-only attribute returns the message id of the AQ message that + generated the notification. It will only be populated if the + subscription was created with the namespace + :data:`oracledb.SUBSCR_NAMESPACE_AQ`. + """ + return self._msgid + + @property + def queries(self) -> list["MessageQuery"]: + """ + This read-only attribute returns a list of message query objects that + give information about query result sets changed for this notification. + This attribute will be an empty list if the ``qos`` parameter did not + include the flag :data:`~oracledb.SUBSCR_QOS_QUERY` when the + subscription was created. + """ + return self._queries + + @property + def queue_name(self) -> Union[str, None]: + """ + This read-only attribute returns the name of the queue which generated + the notification. It will only be populated if the subscription was + created with the namespace :data:`oracledb.SUBSCR_NAMESPACE_AQ`. + """ + return self._queue_name + + @property + def queueName(self) -> Union[str, None]: + """ + Deprecated. Use property queue_name instead. + """ + return self.queue_name + + @property + def registered(self) -> bool: + """ + This read-only attribute returns whether the subscription which + generated this notification is still registered with the database. The + subscription is automatically deregistered with the database when the + subscription timeout value is reached or when the first notification is + sent (when the quality of service flag + :data:`oracledb.SUBSCR_QOS_DEREG_NFY` is used). + """ + return self._registered + + @property + def subscription(self) -> Subscription: + """ + This read-only attribute returns the subscription object for which this + notification was generated. 
+ """ + return self._subscription + + @property + def tables(self) -> list["MessageTable"]: + """ + This read-only attribute returns a list of message table objects that + give information about the tables changed for this notification. This + attribute will be an empty list if the ``qos`` parameter included the + flag :data:`~oracledb.SUBSCR_QOS_QUERY` when the subscription was + created. + """ + return self._tables + + @property + def txid(self) -> Union[bytes, None]: + """ + This read-only attribute returns the id of the transaction that + generated the notification. + """ + return self._txid + + @property + def type(self) -> int: + """ + This read-only attribute returns the type of message that has been + sent. + """ + return self._type + + +class MessageQuery(metaclass=BaseMetaClass): + def __init__(self) -> None: + self._id = 0 + self._operation = 0 + self._tables = [] + + @property + def id(self) -> int: + """ + This read-only attribute returns the query id of the query for which + the result set changed. The value will match the value returned by + :meth:`Subscription.registerquery()` when the related query was + registered. + """ + return self._id + + @property + def operation(self) -> int: + """ + This read-only attribute returns the operation that took place on the + query result set that was changed. Valid values for this attribute are + :data:`~oracledb.EVENT_DEREG` and :data:`~oracledb.EVENT_QUERYCHANGE`. + """ + return self._operation + + @property + def tables(self) -> list["MessageTable"]: + """ + This read-only attribute returns a list of message table objects that + give information about the table changes that caused the query result + set to change for this notification. + """ + return self._tables + + +class MessageRow(metaclass=BaseMetaClass): + def __init__(self) -> None: + self._operation = 0 + self._rowid = None + + @property + def operation(self) -> int: + """ + This read-only attribute returns the operation that took place on the + row that was changed. + """ + return self._operation + + @property + def rowid(self) -> Union[str, None]: + """ + This read-only attribute returns the rowid of the row that was changed. + """ + return self._rowid + + +class MessageTable(metaclass=BaseMetaClass): + def __init__(self) -> None: + self._name = None + self._operation = 0 + self._rows = [] + + @property + def name(self) -> Union[str, None]: + """ + This read-only attribute returns the name of the table that was + changed. + """ + return self._name + + @property + def operation(self) -> int: + """ + This read-only attribute returns the operation that took place on the + table that was changed. + """ + return self._operation + + @property + def rows(self) -> list["MessageRow"]: + """ + This read-only attribute returns a list of message row objects that + give information about the rows changed on the table. This value is + only filled in if the ``qos`` parameter to the + :meth:`Connection.subscribe()` method included the flag + :data:`~oracledb.SUBSCR_QOS_ROWIDS`. 
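+
+        A minimal callback sketch (the ``connection`` variable and the table
+        name are illustrative only; ``oracledb.SUBSCR_QOS_ROWIDS`` must be
+        included in ``qos`` for ``rows`` to be populated)::
+
+            def on_change(message):
+                for table in message.tables:
+                    for row in table.rows:
+                        print(table.name, row.operation, row.rowid)
+
+            subscr = connection.subscribe(
+                callback=on_change, qos=oracledb.SUBSCR_QOS_ROWIDS
+            )
+            subscr.registerquery("select * from regions")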
+ """ + return self._rows diff --git a/.venv/lib/python3.9/site-packages/oracledb/thick_impl.cpython-39-x86_64-linux-gnu.so b/.venv/lib/python3.9/site-packages/oracledb/thick_impl.cpython-39-x86_64-linux-gnu.so new file mode 100755 index 0000000..9fc7e99 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/thick_impl.cpython-39-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/thin_impl.cpython-39-x86_64-linux-gnu.so b/.venv/lib/python3.9/site-packages/oracledb/thin_impl.cpython-39-x86_64-linux-gnu.so new file mode 100755 index 0000000..88fe803 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/oracledb/thin_impl.cpython-39-x86_64-linux-gnu.so differ diff --git a/.venv/lib/python3.9/site-packages/oracledb/utils.py b/.venv/lib/python3.9/site-packages/oracledb/utils.py new file mode 100644 index 0000000..77048b7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/utils.py @@ -0,0 +1,392 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2020, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# utils.py +# +# Contains utility classes and methods. +# ----------------------------------------------------------------------------- + +import functools +from typing import Any, Callable, Optional, Union +import uuid + +from .arrow_array import ArrowArray +from .dataframe import DataFrame + +from . import base_impl +from . import driver_mode +from . import errors +from . import thick_impl + + +def clientversion() -> tuple: + """ + This function can only be called when python-oracledb is in Thick mode. + Using it in Thin mode will throw an exception. + """ + return thick_impl.clientversion() + + +def enable_thin_mode(): + """ + Makes python-oracledb be in Thin mode. After this method is called, Thick + mode cannot be enabled. If python-oracledb is already in Thick mode, then + calling ``enable_thin_mode()`` will fail. If Thin mode connections have + already been opened, or a connection pool created in Thin mode, then + calling ``enable_thin_mode()`` is a no-op. + + Since python-oracledb defaults to Thin mode, almost all applications do not + need to call this method. However, because it bypasses python-oracledb's + internal mode-determination heuristic, it may be useful for applications + with multiple threads that concurrently create :ref:`standalone connections + ` when the application starts. 
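+
+    A minimal start-up sketch (the credential values are placeholders)::
+
+        import oracledb
+
+        oracledb.enable_thin_mode()  # before any connections or pools exist
+
+        connection = oracledb.connect(
+            user="app_user", password=password,
+            dsn="dbhost.example.com/orclpdb"
+        )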
+ """ + with driver_mode.get_manager(requested_thin_mode=True): + pass + + +def from_arrow(obj: Any) -> Union[DataFrame, ArrowArray]: + """ + This method converts a data frame to a + :ref:`DataFrame ` or + :ref:`ArrowArray ` instance. + + If ``obj`` supports the Arrow PyCapsule interface ``__arrow_c_stream__`` + method, then ``from_arrow()`` returns the instance as a :ref:`DataFrame + `. If ``obj`` does not support that method, but does + support ``__arrow_c_array__``, then an :ref:`ArrowArray + ` is returned. + """ + if hasattr(obj, "__arrow_c_stream__"): + return DataFrame._from_arrow(obj) + elif hasattr(obj, "__arrow_c_array__"): + return ArrowArray._from_arrow(obj) + msg = "object must implement the PyCapsule stream or array interfaces" + raise ValueError(msg) + + +def init_oracle_client( + lib_dir: Optional[Union[str, bytes]] = None, + config_dir: Optional[Union[str, bytes]] = None, + error_url: Optional[str] = None, + driver_name: Optional[str] = None, +): + """ + Enables python-oracledb Thick mode by initializing the Oracle Client + library, see :ref:`enablingthick`. If a standalone connection or pool has + already been created in Thin mode, ``init_oracle_client()`` will raise an + exception and python-oracledb will remain in Thin mode. + + If a standalone connection or pool has *not* already been created in Thin + mode, but ``init_oracle_client()`` raises an exception, python-oracledb + will remain in Thin mode but further calls to ``init_oracle_client()`` can + be made, if desired. + + The ``init_oracle_client()`` method can be called multiple times in each + Python process as long as the arguments are the same each time. + + The ``lib_dir`` parameter is a string or a bytes object that specifies the + directory containing Oracle Client libraries. If the ``lib_dir`` parameter + is set, then the specified directory is the only one searched for the + Oracle Client libraries; otherwise, the operating system library search + path is used to locate the Oracle Client library. If you are using Python + 3.11 and later, then the value specified in this parameter is encoded + using `locale.getencoding() `__. For all other Python versions, the encoding + "utf-8" is used. If a bytes object is specified in this parameter, then + this value will be used as is without any encoding. + + The ``config_dir`` parameter is a string or a bytes object that specifies + the directory in which the + :ref:`Optional Oracle Net Configuration ` and + :ref:`Optional Oracle Client Configuration ` files reside. + If the ``config_dir`` parameter is set, then the specified directory is + used to find Oracle Client library configuration files. This is + equivalent to setting the environment variable ``TNS_ADMIN`` and overrides + any value already set in ``TNS_ADMIN``. If this parameter is not set, the + :ref:`Oracle standard ` way of locating Oracle Client + library configuration files is used. If you are using Python 3.11 and + later, then the value specified in this parameter is encoded using + `locale.getencoding() `__. For all other Python versions, the encoding + "utf-8" is used. If a bytes object is specified in this parameter, then + this value will be used as is without any encoding. + + The ``error_url`` parameter is a string that specifies the URL which is + included in the python-oracledb exception message if the Oracle Client + libraries cannot be loaded. 
If the ``error_url`` parameter is set, then + the specified value is included in the message of the exception raised + when the Oracle Client library cannot be loaded; otherwise, the + :ref:`installation` URL is included. This parameter lets your application + display custom installation instructions. + + The ``driver_name`` parameter is a string that specifies the driver name + value. If the ``driver_name`` parameter is set, then the specified value + can be found in database views that give information about connections. + For example, it is in the CLIENT_DRIVER column of the + V$SESSION_CONNECT_INFO view. From Oracle Database 12.2, the name displayed + can be 30 characters. The standard is to set this value to ``" : + version>"``, where is the name of the driver and is its + version. There should be a single space character before and after the + colon. If this parameter is not set, then the value specified in + :attr:`oracledb.defaults.driver_name ` is used. If + the value of this attribute is *None*, then the default value in + python-oracledb Thick mode is like "python-oracledb thk : ". See + :ref:`otherinit`. + + At successful completion of a call to ``oracledb.init_oracle_client()``, + the attribute :attr:`oracledb.defaults.config_dir ` + will be set as determined below (first one wins): + + - the value of the ``oracledb.init_oracle_client()`` parameter + ``config_dir``, if one was passed. + + - the value of :attr:`oracledb.defaults.config_dir ` + if it has one. i.e. + :attr:`oracledb.defaults.config_dir ` remains + unchanged after ``oracledb.init_oracle_client()`` completes. + + - the value of the environment variable ``$TNS_ADMIN``, if it is set. + + - the value of ``$ORACLE_HOME/network/admin`` if the environment variable + ``$ORACLE_HOME`` is set. + + - the directory of the loaded Oracle Client library, appended with + ``network/admin``. Note this directory is not determinable on AIX. + + - otherwise the value *None* is used. (Leaving + :attr:`oracledb.defaults.config_dir ` unchanged). + """ + thick_impl.init_oracle_client(lib_dir, config_dir, error_url, driver_name) + + +def normalize_sessionless_transaction_id( + value: Optional[Union[bytes, str]] = None, +) -> bytes: + """ + Normalize and validate the transaction_id. + + - If `value` is a string, it's UTF-8 encoded. + - If `value` is None, a UUID4-based transaction_id is generated. + - If `value` is not str/bytes/None, raises TypeError. + - If transaction_id exceeds 64 bytes, raises ValueError. + + Returns: + bytes: Normalized transaction_id + """ + if value is None: + value = uuid.uuid4().bytes + elif isinstance(value, str): + value = value.encode("utf-8") + elif not isinstance(value, bytes): + raise TypeError("invalid transaction_id: must be str, bytes, or None") + + if len(value) > 64: + raise ValueError( + f"transaction_id size exceeds 64 bytes (got {len(value)})" + ) + + return value + + +def params_initer(f): + """ + Decorator function which is used on the ConnectParams and PoolParams + classes. It creates the implementation object using the implementation + class stored on the parameter class. It first, however, calls the original + method to ensure that the keyword parameters supplied are valid (the + original method itself does nothing). 
+ """ + + @functools.wraps(f) + def wrapped_f(self, *args, **kwargs): + f(self, *args, **kwargs) + self._impl = self._impl_class() + if kwargs: + self._impl.set(kwargs) + + return wrapped_f + + +def params_setter(f): + """ + Decorator function which is used on the ConnectParams and PoolParams + classes. It calls the set() method on the parameter implementation object + with the supplied keyword arguments. It first, however, calls the original + method to ensure that the keyword parameters supplied are valid (the + original method itself does nothing). + """ + + @functools.wraps(f) + def wrapped_f(self, *args, **kwargs): + f(self, *args, **kwargs) + self._impl.set(kwargs) + + return wrapped_f + + +def register_params_hook(hook_function: Callable) -> None: + """ + Registers a user parameter hook function that will be called internally by + python-oracledb prior to connection or pool creation. The hook function + accepts a copy of the parameters that will be used to create the pool or + standalone connection and may modify them. For example, the cloud native + authentication plugins modify the "access_token" parameter with a function + that will acquire the token using information found in the + "extra_auth_parms" parameter. + + Multiple hooks may be registered. They will be invoked in order of + registration. + """ + if hook_function is None or not callable(hook_function): + raise TypeError("hook_function must be a callable and cannot be None") + base_impl.REGISTERED_PARAMS_HOOKS.append(hook_function) + + +def register_password_type( + password_type: str, hook_function: Callable +) -> None: + """ + Registers a user password hook function that will be called internally by + python-oracledb when a password is supplied as a dictionary containing the + given ``password_type`` as the key "type". The hook function is called for + passwords specified as the ``password``, ``newpassword`` and + ``wallet_parameter`` parameters in calls to :meth:`oracledb.connect()`, + :meth:`oracledb.create_pool()`, :meth:`oracledb.connect_async()`, and + :meth:`oracledb.create_pool_async()`. + + Your hook function is expected to accept the dictionary supplied by the + application and return the valid password. + + Calling :meth:`~oracledb.register_password_type()` with the + ``hook_function`` parameter set to *None* will result in a previously + registered user function being removed and the default behavior restored. + """ + if not isinstance(password_type, str): + raise TypeError("password_type must be a string") + if hook_function is not None and not callable(hook_function): + raise TypeError("hook_function must be a callable") + password_type = password_type.lower() + if hook_function is None: + base_impl.REGISTERED_PASSWORD_TYPES.pop(password_type) + else: + base_impl.REGISTERED_PASSWORD_TYPES[password_type] = hook_function + + +def register_protocol(protocol: str, hook_function: Callable) -> None: + """ + Registers a user protocol hook function that will be called internally by + python-oracledb Thin mode prior to connection or pool creation. The hook + function will be invoked when :func:`oracledb.connect`, + :func:`oracledb.create_pool`, :meth:`oracledb.connect_async()`, or + :meth:`oracledb.create_pool_async()` are called with a ``dsn`` parameter + value prefixed with the specified protocol. The user function will also be + invoked when :meth:`ConnectParams.parse_connect_string()` is called in Thin + or Thick modes with a similar ``connect_string`` parameter value. 
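+
+    As an illustration only (the protocol name and the ``lookup_in_ldap``
+    helper are hypothetical), a registered hook might look like::
+
+        def ldap_hook(protocol, protocol_arg, params):
+            # resolve the connect string externally (hypothetical helper)
+            connect_string = lookup_in_ldap(protocol_arg)
+            params.parse_connect_string(connect_string)
+
+        oracledb.register_protocol("ldaps", ldap_hook)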
+ + Your hook function is expected to construct valid connection details. For + example, if a hook function is registered for the "ldaps" protocol, then + calling :func:`oracledb.connect` with a connection string prefixed with + "ldaps://" will invoke the function. The function can then perform LDAP + lookup to retrieve and set the actual database information that will be + used internally by python-oracledb to complete the connection creation. + + The ``protocol`` parameter is a string that will be matched against the + prefix appearing before "://" in connection strings. + + The ``hook_function`` parameter should be a function with the signature:: + + hook_function(protocol, protocol_arg, params) + + The hook function will be called with the following arguments: + + - The ``protocol`` parameter is the value that was registered. + + - The ``protocol_arg`` parameter is the section after "://" in the + connection string used in the connection or pool creation call, or passed + to :meth:`~ConnectParams.parse_connect_string()`. + + - The ``params`` parameter is an instance of :ref:`ConnectParams + `. + + When your hook function is invoked internally prior to connection or pool + creation, ``params`` will be the ConnectParams instance originally passed + to the :func:`oracledb.connect`, :func:`oracledb.create_pool`, + :meth:`oracledb.connect_async()`, or :meth:`oracledb.create_pool_async()` + call, if such an instance was passed. Otherwise it will be a new + ConnectParams instance. The hook function should parse ``protocol`` and + ``protocol_arg`` and take any desired action to update ``params`` + :ref:`attributes ` with appropriate connection + parameters. Attributes can be set using :meth:`ConnectParams.set()` or + :meth:`ConnectParams.parse_connect_string()`. The ConnectParams instance + will then be used to complete the connection or pool creation. + + When your hook function is invoked by + :meth:`ConnectParams.parse_connect_string()`, then ``params`` will be the + invoking ConnectParams instance that you can update using + :meth:`ConnectParams.set()` or + :meth:`ConnectParams.parse_connect_string()`. + + Internal hook functions for the "tcp" and "tcps" protocols are + pre-registered but can be overridden if needed. If any other protocol has + not been registered, then connecting will result in the error ``DPY-4021: + invalid protocol``. + + Calling :meth:`~oracledb.register_protocol()` with the ``hook_function`` + parameter set to *None* will result in a previously registered user + function being removed and the default behavior restored. + """ + if not isinstance(protocol, str): + raise TypeError("protocol must be a string") + if hook_function is not None and not callable(hook_function): + raise TypeError("hook_function must be a callable") + protocol = protocol.lower() + if hook_function is None: + base_impl.REGISTERED_PROTOCOLS.pop(protocol) + else: + base_impl.REGISTERED_PROTOCOLS[protocol] = hook_function + + +def unregister_params_hook(hook_function: Callable) -> None: + """ + Unregisters a user parameter function that was earlier registered with a + call to :meth:`oracledb.register_params_hook()`. + """ + base_impl.REGISTERED_PARAMS_HOOKS.remove(hook_function) + + +def verify_stored_proc_args( + parameters: Union[list, tuple], keyword_parameters: dict +) -> None: + """ + Verifies that the arguments to a call to a stored procedure or function + are acceptable. 
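+
+    For example (the ``cursor`` variable and procedure name are
+    illustrative), ``cursor.callproc("myproc", [42])`` supplies positional
+    arguments as a list and passes this check, whereas a bare scalar such as
+    ``cursor.callproc("myproc", 42)`` would be rejected.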
+ """ + if parameters is not None and not isinstance(parameters, (list, tuple)): + errors._raise_err(errors.ERR_ARGS_MUST_BE_LIST_OR_TUPLE) + if keyword_parameters is not None and not isinstance( + keyword_parameters, dict + ): + errors._raise_err(errors.ERR_KEYWORD_ARGS_MUST_BE_DICT) diff --git a/.venv/lib/python3.9/site-packages/oracledb/var.py b/.venv/lib/python3.9/site-packages/oracledb/var.py new file mode 100644 index 0000000..85e6836 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/var.py @@ -0,0 +1,186 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2025, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# var.py +# +# Contains the Var class used for managing variables used during bind and +# fetch. These hold the metadata as well as any necessary buffers. +# ----------------------------------------------------------------------------- + +from typing import Any, Callable, Optional, Union +from .dbobject import DbObjectType +from .base import BaseMetaClass +from .base_impl import DbType + + +class Var(metaclass=BaseMetaClass): + def __repr__(self): + value = self._impl.get_all_values() + if not self._impl.is_array and len(value) == 1: + value = value[0] + typ = self._type + return f"" + + @classmethod + def _from_impl(cls, impl, typ=None): + var = cls.__new__(cls) + var._impl = impl + if typ is not None: + var._type = typ + elif impl.metadata.objtype is not None: + var._type = DbObjectType._from_impl(impl.metadata.objtype) + else: + var._type = impl.metadata.dbtype + return var + + @property + def actual_elements(self) -> int: + """ + This read-only attribute returns the actual number of elements in the + variable. This corresponds to the number of elements in a PL/SQL + index-by table for variables that are created using the method + :meth:`Cursor.arrayvar()`. For all other variables, this value will be + identical to the attribute num_elements. + """ + if self._impl.is_array: + return self._impl.num_elements_in_array + return self._impl.num_elements + + @property + def actualElements(self) -> int: + """ + Deprecated. Use property actual_elements instead. + """ + return self.actual_elements + + @property + def buffer_size(self) -> int: + """ + This read-only attribute returns the size of the buffer allocated for + each element in bytes. + """ + return self._impl.metadata.buffer_size + + @property + def bufferSize(self) -> int: + """ + Deprecated. 
Use property buffer_size intead(). + """ + return self.buffer_size + + @property + def convert_nulls(self) -> bool: + """ + This read-only attribute returns whether null values are converted + using the supplied ``outconverter``. + """ + return self._impl.convert_nulls + + def getvalue(self, pos: int = 0) -> Any: + """ + Returns the value at the given position in the variable. For variables + created using the method :meth:`Cursor.arrayvar()`, the value returned + will be a list of each of the values in the PL/SQL index-by table. For + variables bound to DML returning statements, the value returned will + also be a list corresponding to the returned data for the given + execution of the statement (as identified by the ``pos`` parameter). + """ + return self._impl.get_value(pos) + + @property + def inconverter(self) -> Optional[Callable]: + """ + This read-only attribute specifies the method used to convert data from + Python to the Oracle database. The method signature is converter(value) + and the expected return value is the value to bind to the database. If + this attribute is *None*, the value is bound directly without any + conversion. + """ + return self._impl.inconverter + + @property + def num_elements(self) -> int: + """ + This read-only attribute returns the number of elements allocated in an + array, or the number of scalar items that can be fetched into the + variable or bound to the variable. + """ + return self._impl.num_elements + + @property + def numElements(self) -> int: + """ + Deprecated. Use property num_elements instead. + """ + return self.num_elements + + @property + def outconverter(self) -> Optional[Callable]: + """ + This read-only attribute specifies the method used to convert data from + the Oracle database to Python. The method signature is converter(value) + and the expected return value is the value to return to Python. If this + attribute is *None*, the value is returned directly without any + conversion. + """ + return self._impl.outconverter + + def setvalue(self, pos: int, value: Any) -> None: + """ + Sets the value at the given position in the variable. + """ + self._impl.set_value(pos, value) + + @property + def size(self) -> int: + """ + This read-only attribute returns the size of the variable. For strings + this value is the size in characters. For all others, this is the same + value as the attribute buffer_size. + """ + return self._impl.metadata.max_size + + @property + def type(self) -> Union[DbType, DbObjectType]: + """ + This read-only attribute returns the type of the variable. This will be + an :ref:`Oracle Object Type ` if the variable binds + Oracle objects; otherwise, it will be one of the + :ref:`database type constants `. + + Database type constants are now used when the variable is not used for + binding Oracle objects. + """ + return self._type + + @property + def values(self) -> list: + """ + This read-only attribute returns a copy of the value of all actual + positions in the variable as a list. This is the equivalent of calling + getvalue() for each valid position and the length will correspond to + the value of the actual_elements attribute. 
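# --- Illustrative usage sketch (editorial aside, not part of this module) ---
# A hedged example of how the Var attributes documented in this class surface
# through cursor.var() and cursor.arrayvar(); the connection details below are
# placeholders.
import oracledb

conn = oracledb.connect(user="app", password="secret", dsn="dbhost/orclpdb")  # placeholders
cur = conn.cursor()

# A scalar bind variable: one allocated element, set and read back by position.
v = cur.var(str)
v.setvalue(0, "hello")
print(v.getvalue(0))        # "hello"
print(v.num_elements)       # 1

# An array variable (PL/SQL index-by table) created from a list of values.
arr = cur.arrayvar(int, [10, 20, 30])
print(arr.actual_elements)  # 3 - elements currently in the array
print(arr.getvalue())       # [10, 20, 30]
print(arr.values)           # copy of all current values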
+ """ + return self._impl.get_all_values() diff --git a/.venv/lib/python3.9/site-packages/oracledb/version.py b/.venv/lib/python3.9/site-packages/oracledb/version.py new file mode 100644 index 0000000..6374d44 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/oracledb/version.py @@ -0,0 +1,33 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2021, 2026, Oracle and/or its affiliates. +# +# This software is dual-licensed to you under the Universal Permissive License +# (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl and Apache License +# 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose +# either license. +# +# If you elect to accept the software under the Apache License, Version 2.0, +# the following applies: +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# version.py +# +# Defines the version of the package. This is the only place where this is +# found. The setup.cfg configuration file and the documentation configuration +# file doc/src/conf.py both reference this file directly. +# ----------------------------------------------------------------------------- + +__version__ = "3.4.2" diff --git a/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/METADATA b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/METADATA new file mode 100644 index 0000000..d57a39b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/METADATA @@ -0,0 +1,88 @@ +Metadata-Version: 2.4 +Name: paramiko +Version: 4.0.0 +Summary: SSH2 protocol library +Author-email: Jeff Forcier +License-Expression: LGPL-2.1 +Project-URL: Docs, https://docs.paramiko.org +Project-URL: Source, https://github.com/paramiko/paramiko +Project-URL: Changelog, https://www.paramiko.org/changelog.html +Project-URL: CI, https://app.circleci.com/pipelines/github/paramiko/paramiko +Project-URL: Issues, https://github.com/paramiko/paramiko/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Topic :: Internet +Classifier: Topic :: Security :: Cryptography +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: 
Python :: 3.13 +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: bcrypt>=3.2 +Requires-Dist: cryptography>=3.3 +Requires-Dist: invoke>=2.0 +Requires-Dist: pynacl>=1.5 +Provides-Extra: gssapi +Requires-Dist: pyasn1>=0.1.7; extra == "gssapi" +Requires-Dist: gssapi>=1.4.1; platform_system != "Windows" and extra == "gssapi" +Requires-Dist: pywin32>=2.1.8; platform_system == "Windows" and extra == "gssapi" +Dynamic: license-file + +|version| |python| |license| |ci| |coverage| + +.. |version| image:: https://img.shields.io/pypi/v/paramiko + :target: https://pypi.org/project/paramiko/ + :alt: PyPI - Package Version +.. |python| image:: https://img.shields.io/pypi/pyversions/paramiko + :target: https://pypi.org/project/paramiko/ + :alt: PyPI - Python Version +.. |license| image:: https://img.shields.io/pypi/l/paramiko + :target: https://github.com/paramiko/paramiko/blob/main/LICENSE + :alt: PyPI - License +.. |ci| image:: https://img.shields.io/circleci/build/github/paramiko/paramiko/main + :target: https://app.circleci.com/pipelines/github/paramiko/paramiko + :alt: CircleCI +.. |coverage| image:: https://img.shields.io/codecov/c/gh/paramiko/paramiko + :target: https://app.codecov.io/gh/paramiko/paramiko + :alt: Codecov + +Welcome to Paramiko! +==================== + +Paramiko is a pure-Python [#]_ implementation of the SSHv2 protocol [#]_, +providing both client and server functionality. It provides the foundation for +the high-level SSH library `Fabric `_, which is what we +recommend you use for common client use-cases such as running remote shell +commands or transferring files. + +Direct use of Paramiko itself is only intended for users who need +advanced/low-level primitives or want to run an in-Python sshd. + +For installation information, changelogs, FAQs and similar, please visit `our +main project website `_; for API details, see `the +versioned docs `_. Additionally, the project +maintainer keeps a `roadmap `_ on his +personal site. + +.. [#] + Paramiko relies on `cryptography `_ for crypto + functionality, which makes use of C and Rust extensions but has many + precompiled options available. See `our installation page + `_ for details. + +.. [#] + OpenSSH's RFC specification page is a fantastic resource and collection of + links that we won't bother replicating here: + https://www.openssh.com/specs.html + + OpenSSH itself also happens to be our primary reference implementation: + when in doubt, we consult how they do things, unless there are good reasons + not to. There are always some gaps, but we do our best to reconcile them + when possible. 
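As a hedged illustration of the "direct use" case mentioned above, a minimal
SSHClient session might look like the following; the host name, user name, and
key path are placeholders, and for routine remote-command work the Fabric
library recommended above is usually the better fit::

    import paramiko

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    # AutoAddPolicy is convenient for examples but skips host-key verification.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(
        "host.example.com",
        username="deploy",
        key_filename="/home/deploy/.ssh/id_ed25519",
    )
    _, stdout, _ = client.exec_command("uname -a")
    print(stdout.read().decode())
    client.close()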
diff --git a/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/RECORD b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/RECORD new file mode 100644 index 0000000..989a896 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/RECORD @@ -0,0 +1,95 @@ +paramiko-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +paramiko-4.0.0.dist-info/METADATA,sha256=9W89LHpZs7eu34MZOmkAWnscpX_N-pwZN5RNRAWQQTI,3900 +paramiko-4.0.0.dist-info/RECORD,, +paramiko-4.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +paramiko-4.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +paramiko-4.0.0.dist-info/licenses/LICENSE,sha256=X6Jb9fOV_SbnAcLh3kyn0WKBaYbceRwi-PQiaFetG7I,26436 +paramiko-4.0.0.dist-info/top_level.txt,sha256=R9n-eCc_1kx1DnijF7Glmm-H67k9jUz5rm2YoPL8n54,9 +paramiko/__init__.py,sha256=aU-VhYiW5aIJosmuPeSteR1h5GeLOXNqdcpkaicsCwg,3523 +paramiko/__pycache__/__init__.cpython-39.pyc,, +paramiko/__pycache__/_winapi.cpython-39.pyc,, +paramiko/__pycache__/agent.cpython-39.pyc,, +paramiko/__pycache__/auth_handler.cpython-39.pyc,, +paramiko/__pycache__/auth_strategy.cpython-39.pyc,, +paramiko/__pycache__/ber.cpython-39.pyc,, +paramiko/__pycache__/buffered_pipe.cpython-39.pyc,, +paramiko/__pycache__/channel.cpython-39.pyc,, +paramiko/__pycache__/client.cpython-39.pyc,, +paramiko/__pycache__/common.cpython-39.pyc,, +paramiko/__pycache__/compress.cpython-39.pyc,, +paramiko/__pycache__/config.cpython-39.pyc,, +paramiko/__pycache__/ecdsakey.cpython-39.pyc,, +paramiko/__pycache__/ed25519key.cpython-39.pyc,, +paramiko/__pycache__/file.cpython-39.pyc,, +paramiko/__pycache__/hostkeys.cpython-39.pyc,, +paramiko/__pycache__/kex_curve25519.cpython-39.pyc,, +paramiko/__pycache__/kex_ecdh_nist.cpython-39.pyc,, +paramiko/__pycache__/kex_gex.cpython-39.pyc,, +paramiko/__pycache__/kex_group1.cpython-39.pyc,, +paramiko/__pycache__/kex_group14.cpython-39.pyc,, +paramiko/__pycache__/kex_group16.cpython-39.pyc,, +paramiko/__pycache__/kex_gss.cpython-39.pyc,, +paramiko/__pycache__/message.cpython-39.pyc,, +paramiko/__pycache__/packet.cpython-39.pyc,, +paramiko/__pycache__/pipe.cpython-39.pyc,, +paramiko/__pycache__/pkey.cpython-39.pyc,, +paramiko/__pycache__/primes.cpython-39.pyc,, +paramiko/__pycache__/proxy.cpython-39.pyc,, +paramiko/__pycache__/rsakey.cpython-39.pyc,, +paramiko/__pycache__/server.cpython-39.pyc,, +paramiko/__pycache__/sftp.cpython-39.pyc,, +paramiko/__pycache__/sftp_attr.cpython-39.pyc,, +paramiko/__pycache__/sftp_client.cpython-39.pyc,, +paramiko/__pycache__/sftp_file.cpython-39.pyc,, +paramiko/__pycache__/sftp_handle.cpython-39.pyc,, +paramiko/__pycache__/sftp_server.cpython-39.pyc,, +paramiko/__pycache__/sftp_si.cpython-39.pyc,, +paramiko/__pycache__/ssh_exception.cpython-39.pyc,, +paramiko/__pycache__/ssh_gss.cpython-39.pyc,, +paramiko/__pycache__/transport.cpython-39.pyc,, +paramiko/__pycache__/util.cpython-39.pyc,, +paramiko/__pycache__/win_openssh.cpython-39.pyc,, +paramiko/__pycache__/win_pageant.cpython-39.pyc,, +paramiko/_winapi.py,sha256=e4PyDmHmyLcAkZo4WAX7ah_I6fq4ex7A8FhxOPYAoA8,11204 +paramiko/agent.py,sha256=4vP4knAAzZiSblzSM_srbTYK2hVnUUT561vTBdCe2i4,15877 +paramiko/auth_handler.py,sha256=kMY00x5sUkrcR9uRHIIakQw4E6649oW1tMtIQPrFMFo,43006 +paramiko/auth_strategy.py,sha256=Pjcp8q64gUwk4CneGOnOhW0WBeKBRFURieWqC9AN0Ec,11437 +paramiko/ber.py,sha256=uFb-YokU4Rg2fKjyX8VMAu05STVk37YRgghlNHmdoYo,4369 +paramiko/buffered_pipe.py,sha256=AlkTLHYWbj4W-ZD7ORQZFjEFv7kC7QSvEYypfiHpwxw,7225 
+paramiko/channel.py,sha256=MXO-C5dipy8Q0Shh9ceR-CPPiBB-ssT_9oIgwzBhQ_o,49222 +paramiko/client.py,sha256=d1UAVgVf_eWf-VqpwsjhyMFo4IEZcX2-rzZtkomsffY,34337 +paramiko/common.py,sha256=sBJW8KJz_EE8TsT7wLWTPuUiL2nNsLa_cfrTCe9Fyio,7756 +paramiko/compress.py,sha256=RCHTino0cHz1dy1pLbOhFhdWfGl4u50VmBcbT7qBWNc,1282 +paramiko/config.py,sha256=QPzwsk4Vem-Ecg2NhjRu78O9SU5ZO6DmfxZTA6cHWco,27362 +paramiko/ecdsakey.py,sha256=nK8oxORGgLP-zoC2REG46bAchVrlr35jfuxTn_Ac8sM,11653 +paramiko/ed25519key.py,sha256=FYurG0gqxmhNKh_22Hp3XEON5zuvzv-r5w8y9yJQgqY,7457 +paramiko/file.py,sha256=NgbhUjYgrLh-HQtsdYlPZ3CyvS0jhXqePk45GhHPMSo,19063 +paramiko/hostkeys.py,sha256=Ez2gaZF5ntj-vTvMbVXZoLRpU6tBnhSbXJm5FUlvzhw,13144 +paramiko/kex_curve25519.py,sha256=voEFDs_zkgEdWOqDakU-5DLYO3qotWcXYiqOCUP4GDo,4436 +paramiko/kex_ecdh_nist.py,sha256=RbHPwv8Gu5iR9LwMf-N0yUjXEQgRKKBLaAT3dacv44Q,5012 +paramiko/kex_gex.py,sha256=j5fPexu48CGObvpPKn0kZTjdn1onfz0iYhh8p8kIgM0,10320 +paramiko/kex_group1.py,sha256=HfzkLH1SKaIavnN-LGuF-lAMaAECB6Izj_TELhg4Omc,5740 +paramiko/kex_group14.py,sha256=AX7xrTCqMROrMQ_3Dp8WmLkNN8dTovhPjtWgaLLpRxs,1833 +paramiko/kex_group16.py,sha256=s7qB7tSDFkG5ztlg3mV958UVWnKgn1LIA-B2t-h1eX4,2288 +paramiko/kex_gss.py,sha256=BadM1nNN-ORDRuJmb93v0xBGQlce1n29lT4ihsnmY-4,24562 +paramiko/message.py,sha256=wHTWVU_Xgfq-djOOPVF5jAsE-XgADoH47G0iI5N69gY,9349 +paramiko/packet.py,sha256=CocYnZ2Vbz7VRo-6BGMhlRWro7FLIISpxTiYeoEsyaM,24314 +paramiko/pipe.py,sha256=cmWwOyMdys62IGLC9lDznwTu11xLg6wB9mV-60lr86A,3902 +paramiko/pkey.py,sha256=E3hegNR3eS16MMVGEW2v5f_5PBcKjNwqJ_by2HXvfdc,36719 +paramiko/primes.py,sha256=6Uv0fFsTmIJxInMqeNhryw9jrzvgNksKbA7ecBI0g5E,5107 +paramiko/proxy.py,sha256=I5XxN1aDren3Fw1f3SOoQLP4O9O7jeyey9meG6Og0q4,4648 +paramiko/rsakey.py,sha256=7xoDJvfcaZVVYRGlv8xamhO3zYvE-wI_Nd814L8TxzQ,7546 +paramiko/server.py,sha256=oNkI7t2gSMYIwLov5vl_BbHU-AwFC5LxP78YIXw7mq4,30457 +paramiko/sftp.py,sha256=pyZPnR0fv94YopfPDpslloTiYelu5GuM70cXUGOaKHM,6471 +paramiko/sftp_attr.py,sha256=AX-cG_FiPinftQQq8Ndo1Mc_bZz-AhXFQQpac-oV0wg,8258 +paramiko/sftp_client.py,sha256=e_zi6V233tjx3DH9TH7rRDKRO-TCZ_zyOkBw4sSRIjo,35855 +paramiko/sftp_file.py,sha256=NgVfDhxxURhFrEqniIJQgKQ6wlgCTgOVu5GwQczW_hk,21820 +paramiko/sftp_handle.py,sha256=ho-eyiEvhYHt-_VytznNzNeGktfaIsQX5l4bespWZAk,7424 +paramiko/sftp_server.py,sha256=yH-BgsYj7BuZNGn_EHpnLRPmoNGoYB9g_XxOlK4IcYA,19492 +paramiko/sftp_si.py,sha256=Uf90bFme6Jy6yl7k4jJ28IJboq6KiyPWLjXgP9DR6gk,12544 +paramiko/ssh_exception.py,sha256=F82_vTnKr3UF7ai8dTEv6PnqwVoREyk2c9_Bo3smsrg,7494 +paramiko/ssh_gss.py,sha256=BNhiDON1FOJB2P2VQUQHLYJ7RZhTbDjc7NPMqSNwH6Y,28713 +paramiko/transport.py,sha256=BuO3Ai0aaE61rQ5i_WZ7Y-ZYhJqsxIZl0bXDwi5pLKU,135414 +paramiko/util.py,sha256=7eEtwmxiST4Jj3HIqB7irz0SMofJlmy4yuYqda-rqPs,9494 +paramiko/win_openssh.py,sha256=DbWJT0hiE6UImAbMqehcGuVLDWIl-2rObe-AhaGuWpk,1918 +paramiko/win_pageant.py,sha256=i5TG472VzJKVnK08oxM4hK_qb9IzL_Fo96B8ouaxXHo,4177 diff --git a/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/REQUESTED b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/WHEEL new file mode 100644 index 0000000..e7fa31b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git 
a/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/licenses/LICENSE b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..d12bef0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/licenses/LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Suite 500, Boston, MA 02110-1335 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. 
Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. 
A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. 
But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. 
+ + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. 
However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. 
For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. 
SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! + + diff --git a/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..8608c1b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko-4.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +paramiko diff --git a/.venv/lib/python3.9/site-packages/paramiko/__init__.py b/.venv/lib/python3.9/site-packages/paramiko/__init__.py new file mode 100644 index 0000000..92ff86f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/__init__.py @@ -0,0 +1,120 @@ +# Copyright (C) 2003-2011 Robey Pointer +# +# This file is part of paramiko. 
+# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +from importlib import metadata + +__version__ = metadata.version("paramiko") + +# flake8: noqa +from paramiko.transport import ( + SecurityOptions, + ServiceRequestingTransport, + Transport, +) +from paramiko.client import ( + AutoAddPolicy, + MissingHostKeyPolicy, + RejectPolicy, + SSHClient, + WarningPolicy, +) +from paramiko.auth_handler import AuthHandler +from paramiko.auth_strategy import ( + AuthFailure, + AuthStrategy, + AuthResult, + AuthSource, + InMemoryPrivateKey, + NoneAuth, + OnDiskPrivateKey, + Password, + PrivateKey, + SourceResult, +) +from paramiko.ssh_gss import GSSAuth, GSS_AUTH_AVAILABLE, GSS_EXCEPTIONS +from paramiko.channel import ( + Channel, + ChannelFile, + ChannelStderrFile, + ChannelStdinFile, +) +from paramiko.ssh_exception import ( + AuthenticationException, + BadAuthenticationType, + BadHostKeyException, + ChannelException, + ConfigParseError, + CouldNotCanonicalize, + IncompatiblePeer, + MessageOrderError, + PasswordRequiredException, + ProxyCommandFailure, + SSHException, +) +from paramiko.server import ServerInterface, SubsystemHandler, InteractiveQuery +from paramiko.rsakey import RSAKey +from paramiko.ecdsakey import ECDSAKey +from paramiko.ed25519key import Ed25519Key +from paramiko.sftp import SFTPError, BaseSFTP +from paramiko.sftp_client import SFTP, SFTPClient +from paramiko.sftp_server import SFTPServer +from paramiko.sftp_attr import SFTPAttributes +from paramiko.sftp_handle import SFTPHandle +from paramiko.sftp_si import SFTPServerInterface +from paramiko.sftp_file import SFTPFile +from paramiko.message import Message +from paramiko.packet import Packetizer +from paramiko.file import BufferedFile +from paramiko.agent import Agent, AgentKey +from paramiko.pkey import PKey, PublicBlob, UnknownKeyType +from paramiko.hostkeys import HostKeys +from paramiko.config import SSHConfig, SSHConfigDict +from paramiko.proxy import ProxyCommand + +from paramiko.common import ( + AUTH_SUCCESSFUL, + AUTH_PARTIALLY_SUCCESSFUL, + AUTH_FAILED, + OPEN_SUCCEEDED, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_FAILED_CONNECT_FAILED, + OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, + OPEN_FAILED_RESOURCE_SHORTAGE, +) + +from paramiko.sftp import ( + SFTP_OK, + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, + SFTP_FAILURE, + SFTP_BAD_MESSAGE, + SFTP_NO_CONNECTION, + SFTP_CONNECTION_LOST, + SFTP_OP_UNSUPPORTED, +) + +from paramiko.common import io_sleep + + +# TODO: I guess a real plugin system might be nice for future expansion... 
+key_classes = [RSAKey, Ed25519Key, ECDSAKey] + + +__author__ = "Jeff Forcier " +__license__ = "GNU Lesser General Public License (LGPL)" diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..84940d1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/_winapi.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/_winapi.cpython-39.pyc new file mode 100644 index 0000000..56295f7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/_winapi.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/agent.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/agent.cpython-39.pyc new file mode 100644 index 0000000..a921b05 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/agent.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/auth_handler.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/auth_handler.cpython-39.pyc new file mode 100644 index 0000000..04e7254 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/auth_handler.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/auth_strategy.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/auth_strategy.cpython-39.pyc new file mode 100644 index 0000000..f4ae50a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/auth_strategy.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ber.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ber.cpython-39.pyc new file mode 100644 index 0000000..b1731f3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ber.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/buffered_pipe.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/buffered_pipe.cpython-39.pyc new file mode 100644 index 0000000..0d52d68 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/buffered_pipe.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/channel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/channel.cpython-39.pyc new file mode 100644 index 0000000..4079ea9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/channel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/client.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/client.cpython-39.pyc new file mode 100644 index 0000000..b3dc78b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/client.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/common.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/common.cpython-39.pyc new file mode 100644 index 0000000..37821bb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/common.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/compress.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/compress.cpython-39.pyc new file mode 100644 index 0000000..ad311f2 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/compress.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/config.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/config.cpython-39.pyc new file mode 100644 index 0000000..0affc93 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/config.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ecdsakey.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ecdsakey.cpython-39.pyc new file mode 100644 index 0000000..3f8f6aa Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ecdsakey.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ed25519key.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ed25519key.cpython-39.pyc new file mode 100644 index 0000000..f352805 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ed25519key.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/file.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/file.cpython-39.pyc new file mode 100644 index 0000000..40d1dca Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/file.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/hostkeys.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/hostkeys.cpython-39.pyc new file mode 100644 index 0000000..88de63a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/hostkeys.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_curve25519.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_curve25519.cpython-39.pyc new file mode 100644 index 0000000..8af2ae1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_curve25519.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_ecdh_nist.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_ecdh_nist.cpython-39.pyc new file mode 100644 index 0000000..9ead5be Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_ecdh_nist.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_gex.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_gex.cpython-39.pyc new file mode 100644 index 0000000..ae6499e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_gex.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group1.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group1.cpython-39.pyc new file mode 100644 index 0000000..cff06bc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group1.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group14.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group14.cpython-39.pyc new file mode 100644 index 0000000..d28329d Binary files /dev/null and 
b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group14.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group16.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group16.cpython-39.pyc new file mode 100644 index 0000000..185ac7d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_group16.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_gss.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_gss.cpython-39.pyc new file mode 100644 index 0000000..42e2772 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/kex_gss.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/message.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/message.cpython-39.pyc new file mode 100644 index 0000000..456093d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/message.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/packet.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/packet.cpython-39.pyc new file mode 100644 index 0000000..06e5436 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/packet.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/pipe.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/pipe.cpython-39.pyc new file mode 100644 index 0000000..bd4aba8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/pipe.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/pkey.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/pkey.cpython-39.pyc new file mode 100644 index 0000000..75ac8c6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/pkey.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/primes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/primes.cpython-39.pyc new file mode 100644 index 0000000..735b48e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/primes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/proxy.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/proxy.cpython-39.pyc new file mode 100644 index 0000000..7dd7391 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/proxy.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/rsakey.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/rsakey.cpython-39.pyc new file mode 100644 index 0000000..c71bd97 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/rsakey.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/server.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/server.cpython-39.pyc new file mode 100644 index 0000000..77f7eb1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/server.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp.cpython-39.pyc new file mode 100644 index 
0000000..3929982 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_attr.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_attr.cpython-39.pyc new file mode 100644 index 0000000..de2ebad Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_attr.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_client.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_client.cpython-39.pyc new file mode 100644 index 0000000..a0b5c34 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_client.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_file.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_file.cpython-39.pyc new file mode 100644 index 0000000..24feaab Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_file.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_handle.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_handle.cpython-39.pyc new file mode 100644 index 0000000..ce17541 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_handle.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_server.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_server.cpython-39.pyc new file mode 100644 index 0000000..b09b5bd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_server.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_si.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_si.cpython-39.pyc new file mode 100644 index 0000000..e8a4d07 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/sftp_si.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ssh_exception.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ssh_exception.cpython-39.pyc new file mode 100644 index 0000000..39c7273 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ssh_exception.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ssh_gss.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ssh_gss.cpython-39.pyc new file mode 100644 index 0000000..9b9aa60 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/ssh_gss.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/transport.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/transport.cpython-39.pyc new file mode 100644 index 0000000..299ea55 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/transport.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/util.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/util.cpython-39.pyc new file mode 100644 index 0000000..86af327 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/util.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/win_openssh.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/win_openssh.cpython-39.pyc new file mode 100644 index 0000000..00b9fd9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/win_openssh.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/__pycache__/win_pageant.cpython-39.pyc b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/win_pageant.cpython-39.pyc new file mode 100644 index 0000000..b7556ca Binary files /dev/null and b/.venv/lib/python3.9/site-packages/paramiko/__pycache__/win_pageant.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/paramiko/_winapi.py b/.venv/lib/python3.9/site-packages/paramiko/_winapi.py new file mode 100644 index 0000000..4295457 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/_winapi.py @@ -0,0 +1,413 @@ +""" +Windows API functions implemented as ctypes functions and classes as found +in jaraco.windows (3.4.1). + +If you encounter issues with this module, please consider reporting the issues +in jaraco.windows and asking the author to port the fixes back here. +""" + +import builtins +import ctypes.wintypes + +from paramiko.util import u + + +###################### +# jaraco.windows.error + + +def format_system_message(errno): + """ + Call FormatMessage with a system error number to retrieve + the descriptive error message. + """ + # first some flags used by FormatMessageW + ALLOCATE_BUFFER = 0x100 + FROM_SYSTEM = 0x1000 + + # Let FormatMessageW allocate the buffer (we'll free it below) + # Also, let it know we want a system error message. + flags = ALLOCATE_BUFFER | FROM_SYSTEM + source = None + message_id = errno + language_id = 0 + result_buffer = ctypes.wintypes.LPWSTR() + buffer_size = 0 + arguments = None + bytes = ctypes.windll.kernel32.FormatMessageW( + flags, + source, + message_id, + language_id, + ctypes.byref(result_buffer), + buffer_size, + arguments, + ) + # note the following will cause an infinite loop if GetLastError + # repeatedly returns an error that cannot be formatted, although + # this should not happen. 
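    # Editorial note: FormatMessageW returns the number of characters it stored
    # (zero means failure), and because ALLOCATE_BUFFER is set it writes a
    # LocalAlloc'd pointer into result_buffer -- which is why LocalFree is
    # called on result_buffer below.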
+ handle_nonzero_success(bytes) + message = result_buffer.value + ctypes.windll.kernel32.LocalFree(result_buffer) + return message + + +class WindowsError(builtins.WindowsError): + """more info about errors at + http://msdn.microsoft.com/en-us/library/ms681381(VS.85).aspx""" + + def __init__(self, value=None): + if value is None: + value = ctypes.windll.kernel32.GetLastError() + strerror = format_system_message(value) + args = 0, strerror, None, value + super().__init__(*args) + + @property + def message(self): + return self.strerror + + @property + def code(self): + return self.winerror + + def __str__(self): + return self.message + + def __repr__(self): + return "{self.__class__.__name__}({self.winerror})".format(**vars()) + + +def handle_nonzero_success(result): + if result == 0: + raise WindowsError() + + +########################### +# jaraco.windows.api.memory + +GMEM_MOVEABLE = 0x2 + +GlobalAlloc = ctypes.windll.kernel32.GlobalAlloc +GlobalAlloc.argtypes = ctypes.wintypes.UINT, ctypes.c_size_t +GlobalAlloc.restype = ctypes.wintypes.HANDLE + +GlobalLock = ctypes.windll.kernel32.GlobalLock +GlobalLock.argtypes = (ctypes.wintypes.HGLOBAL,) +GlobalLock.restype = ctypes.wintypes.LPVOID + +GlobalUnlock = ctypes.windll.kernel32.GlobalUnlock +GlobalUnlock.argtypes = (ctypes.wintypes.HGLOBAL,) +GlobalUnlock.restype = ctypes.wintypes.BOOL + +GlobalSize = ctypes.windll.kernel32.GlobalSize +GlobalSize.argtypes = (ctypes.wintypes.HGLOBAL,) +GlobalSize.restype = ctypes.c_size_t + +CreateFileMapping = ctypes.windll.kernel32.CreateFileMappingW +CreateFileMapping.argtypes = [ + ctypes.wintypes.HANDLE, + ctypes.c_void_p, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ctypes.wintypes.LPWSTR, +] +CreateFileMapping.restype = ctypes.wintypes.HANDLE + +MapViewOfFile = ctypes.windll.kernel32.MapViewOfFile +MapViewOfFile.restype = ctypes.wintypes.HANDLE + +UnmapViewOfFile = ctypes.windll.kernel32.UnmapViewOfFile +UnmapViewOfFile.argtypes = (ctypes.wintypes.HANDLE,) + +RtlMoveMemory = ctypes.windll.kernel32.RtlMoveMemory +RtlMoveMemory.argtypes = (ctypes.c_void_p, ctypes.c_void_p, ctypes.c_size_t) + +ctypes.windll.kernel32.LocalFree.argtypes = (ctypes.wintypes.HLOCAL,) + +##################### +# jaraco.windows.mmap + + +class MemoryMap: + """ + A memory map object which can have security attributes overridden. + """ + + def __init__(self, name, length, security_attributes=None): + self.name = name + self.length = length + self.security_attributes = security_attributes + self.pos = 0 + + def __enter__(self): + p_SA = ( + ctypes.byref(self.security_attributes) + if self.security_attributes + else None + ) + INVALID_HANDLE_VALUE = -1 + PAGE_READWRITE = 0x4 + FILE_MAP_WRITE = 0x2 + filemap = ctypes.windll.kernel32.CreateFileMappingW( + INVALID_HANDLE_VALUE, + p_SA, + PAGE_READWRITE, + 0, + self.length, + u(self.name), + ) + handle_nonzero_success(filemap) + if filemap == INVALID_HANDLE_VALUE: + raise Exception("Failed to create file mapping") + self.filemap = filemap + self.view = MapViewOfFile(filemap, FILE_MAP_WRITE, 0, 0, 0) + return self + + def seek(self, pos): + self.pos = pos + + def write(self, msg): + assert isinstance(msg, bytes) + n = len(msg) + if self.pos + n >= self.length: # A little safety. + raise ValueError(f"Refusing to write {n} bytes") + dest = self.view + self.pos + length = ctypes.c_size_t(n) + ctypes.windll.kernel32.RtlMoveMemory(dest, msg, length) + self.pos += n + + def read(self, n): + """ + Read n bytes from mapped view. 
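        Bytes are copied out of the mapped view with RtlMoveMemory starting at
        the current position, and the position then advances by ``n``.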
+ """ + out = ctypes.create_string_buffer(n) + source = self.view + self.pos + length = ctypes.c_size_t(n) + ctypes.windll.kernel32.RtlMoveMemory(out, source, length) + self.pos += n + return out.raw + + def __exit__(self, exc_type, exc_val, tb): + ctypes.windll.kernel32.UnmapViewOfFile(self.view) + ctypes.windll.kernel32.CloseHandle(self.filemap) + + +############################# +# jaraco.windows.api.security + +# from WinNT.h +READ_CONTROL = 0x00020000 +STANDARD_RIGHTS_REQUIRED = 0x000F0000 +STANDARD_RIGHTS_READ = READ_CONTROL +STANDARD_RIGHTS_WRITE = READ_CONTROL +STANDARD_RIGHTS_EXECUTE = READ_CONTROL +STANDARD_RIGHTS_ALL = 0x001F0000 + +# from NTSecAPI.h +POLICY_VIEW_LOCAL_INFORMATION = 0x00000001 +POLICY_VIEW_AUDIT_INFORMATION = 0x00000002 +POLICY_GET_PRIVATE_INFORMATION = 0x00000004 +POLICY_TRUST_ADMIN = 0x00000008 +POLICY_CREATE_ACCOUNT = 0x00000010 +POLICY_CREATE_SECRET = 0x00000020 +POLICY_CREATE_PRIVILEGE = 0x00000040 +POLICY_SET_DEFAULT_QUOTA_LIMITS = 0x00000080 +POLICY_SET_AUDIT_REQUIREMENTS = 0x00000100 +POLICY_AUDIT_LOG_ADMIN = 0x00000200 +POLICY_SERVER_ADMIN = 0x00000400 +POLICY_LOOKUP_NAMES = 0x00000800 +POLICY_NOTIFICATION = 0x00001000 + +POLICY_ALL_ACCESS = ( + STANDARD_RIGHTS_REQUIRED + | POLICY_VIEW_LOCAL_INFORMATION + | POLICY_VIEW_AUDIT_INFORMATION + | POLICY_GET_PRIVATE_INFORMATION + | POLICY_TRUST_ADMIN + | POLICY_CREATE_ACCOUNT + | POLICY_CREATE_SECRET + | POLICY_CREATE_PRIVILEGE + | POLICY_SET_DEFAULT_QUOTA_LIMITS + | POLICY_SET_AUDIT_REQUIREMENTS + | POLICY_AUDIT_LOG_ADMIN + | POLICY_SERVER_ADMIN + | POLICY_LOOKUP_NAMES +) + + +POLICY_READ = ( + STANDARD_RIGHTS_READ + | POLICY_VIEW_AUDIT_INFORMATION + | POLICY_GET_PRIVATE_INFORMATION +) + +POLICY_WRITE = ( + STANDARD_RIGHTS_WRITE + | POLICY_TRUST_ADMIN + | POLICY_CREATE_ACCOUNT + | POLICY_CREATE_SECRET + | POLICY_CREATE_PRIVILEGE + | POLICY_SET_DEFAULT_QUOTA_LIMITS + | POLICY_SET_AUDIT_REQUIREMENTS + | POLICY_AUDIT_LOG_ADMIN + | POLICY_SERVER_ADMIN +) + +POLICY_EXECUTE = ( + STANDARD_RIGHTS_EXECUTE + | POLICY_VIEW_LOCAL_INFORMATION + | POLICY_LOOKUP_NAMES +) + + +class TokenAccess: + TOKEN_QUERY = 0x8 + + +class TokenInformationClass: + TokenUser = 1 + + +class TOKEN_USER(ctypes.Structure): + num = 1 + _fields_ = [ + ("SID", ctypes.c_void_p), + ("ATTRIBUTES", ctypes.wintypes.DWORD), + ] + + +class SECURITY_DESCRIPTOR(ctypes.Structure): + """ + typedef struct _SECURITY_DESCRIPTOR + { + UCHAR Revision; + UCHAR Sbz1; + SECURITY_DESCRIPTOR_CONTROL Control; + PSID Owner; + PSID Group; + PACL Sacl; + PACL Dacl; + } SECURITY_DESCRIPTOR; + """ + + SECURITY_DESCRIPTOR_CONTROL = ctypes.wintypes.USHORT + REVISION = 1 + + _fields_ = [ + ("Revision", ctypes.c_ubyte), + ("Sbz1", ctypes.c_ubyte), + ("Control", SECURITY_DESCRIPTOR_CONTROL), + ("Owner", ctypes.c_void_p), + ("Group", ctypes.c_void_p), + ("Sacl", ctypes.c_void_p), + ("Dacl", ctypes.c_void_p), + ] + + +class SECURITY_ATTRIBUTES(ctypes.Structure): + """ + typedef struct _SECURITY_ATTRIBUTES { + DWORD nLength; + LPVOID lpSecurityDescriptor; + BOOL bInheritHandle; + } SECURITY_ATTRIBUTES; + """ + + _fields_ = [ + ("nLength", ctypes.wintypes.DWORD), + ("lpSecurityDescriptor", ctypes.c_void_p), + ("bInheritHandle", ctypes.wintypes.BOOL), + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.nLength = ctypes.sizeof(SECURITY_ATTRIBUTES) + + @property + def descriptor(self): + return self._descriptor + + @descriptor.setter + def descriptor(self, value): + self._descriptor = value + self.lpSecurityDescriptor = ctypes.addressof(value) + + 
+ctypes.windll.advapi32.SetSecurityDescriptorOwner.argtypes = ( + ctypes.POINTER(SECURITY_DESCRIPTOR), + ctypes.c_void_p, + ctypes.wintypes.BOOL, +) + +######################### +# jaraco.windows.security + + +def GetTokenInformation(token, information_class): + """ + Given a token, get the token information for it. + """ + data_size = ctypes.wintypes.DWORD() + ctypes.windll.advapi32.GetTokenInformation( + token, information_class.num, 0, 0, ctypes.byref(data_size) + ) + data = ctypes.create_string_buffer(data_size.value) + handle_nonzero_success( + ctypes.windll.advapi32.GetTokenInformation( + token, + information_class.num, + ctypes.byref(data), + ctypes.sizeof(data), + ctypes.byref(data_size), + ) + ) + return ctypes.cast(data, ctypes.POINTER(TOKEN_USER)).contents + + +def OpenProcessToken(proc_handle, access): + result = ctypes.wintypes.HANDLE() + proc_handle = ctypes.wintypes.HANDLE(proc_handle) + handle_nonzero_success( + ctypes.windll.advapi32.OpenProcessToken( + proc_handle, access, ctypes.byref(result) + ) + ) + return result + + +def get_current_user(): + """ + Return a TOKEN_USER for the owner of this process. + """ + process = OpenProcessToken( + ctypes.windll.kernel32.GetCurrentProcess(), TokenAccess.TOKEN_QUERY + ) + return GetTokenInformation(process, TOKEN_USER) + + +def get_security_attributes_for_user(user=None): + """ + Return a SECURITY_ATTRIBUTES structure with the SID set to the + specified user (uses current user if none is specified). + """ + if user is None: + user = get_current_user() + + assert isinstance(user, TOKEN_USER), "user must be TOKEN_USER instance" + + SD = SECURITY_DESCRIPTOR() + SA = SECURITY_ATTRIBUTES() + # by attaching the actual security descriptor, it will be garbage- + # collected with the security attributes + SA.descriptor = SD + SA.bInheritHandle = 1 + + ctypes.windll.advapi32.InitializeSecurityDescriptor( + ctypes.byref(SD), SECURITY_DESCRIPTOR.REVISION + ) + ctypes.windll.advapi32.SetSecurityDescriptorOwner( + ctypes.byref(SD), user.SID, 0 + ) + return SA diff --git a/.venv/lib/python3.9/site-packages/paramiko/agent.py b/.venv/lib/python3.9/site-packages/paramiko/agent.py new file mode 100644 index 0000000..b29a0d1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/agent.py @@ -0,0 +1,497 @@ +# Copyright (C) 2003-2007 John Rochester +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +""" +SSH Agent interface +""" + +import os +import socket +import struct +import sys +import threading +import time +import tempfile +import stat +from logging import DEBUG +from select import select +from paramiko.common import io_sleep, byte_chr + +from paramiko.ssh_exception import SSHException, AuthenticationException +from paramiko.message import Message +from paramiko.pkey import PKey, UnknownKeyType +from paramiko.util import asbytes, get_logger + +cSSH2_AGENTC_REQUEST_IDENTITIES = byte_chr(11) +SSH2_AGENT_IDENTITIES_ANSWER = 12 +cSSH2_AGENTC_SIGN_REQUEST = byte_chr(13) +SSH2_AGENT_SIGN_RESPONSE = 14 + +SSH_AGENT_RSA_SHA2_256 = 2 +SSH_AGENT_RSA_SHA2_512 = 4 +# NOTE: RFC mildly confusing; while these flags are OR'd together, OpenSSH at +# least really treats them like "AND"s, in the sense that if it finds the +# SHA256 flag set it won't continue looking at the SHA512 one; it +# short-circuits right away. +# Thus, we never want to eg submit 6 to say "either's good". +ALGORITHM_FLAG_MAP = { + "rsa-sha2-256": SSH_AGENT_RSA_SHA2_256, + "rsa-sha2-512": SSH_AGENT_RSA_SHA2_512, +} +for key, value in list(ALGORITHM_FLAG_MAP.items()): + ALGORITHM_FLAG_MAP[f"{key}-cert-v01@openssh.com"] = value + + +# TODO 4.0: rename all these - including making some of their methods public? +class AgentSSH: + def __init__(self): + self._conn = None + self._keys = () + + def get_keys(self): + """ + Return the list of keys available through the SSH agent, if any. If + no SSH agent was running (or it couldn't be contacted), an empty list + will be returned. + + This method performs no IO, just returns the list of keys retrieved + when the connection was made. + + :return: + a tuple of `.AgentKey` objects representing keys available on the + SSH agent + """ + return self._keys + + def _connect(self, conn): + self._conn = conn + ptype, result = self._send_message(cSSH2_AGENTC_REQUEST_IDENTITIES) + if ptype != SSH2_AGENT_IDENTITIES_ANSWER: + raise SSHException("could not get keys from ssh-agent") + keys = [] + for i in range(result.get_int()): + keys.append( + AgentKey( + agent=self, + blob=result.get_binary(), + comment=result.get_text(), + ) + ) + self._keys = tuple(keys) + + def _close(self): + if self._conn is not None: + self._conn.close() + self._conn = None + self._keys = () + + def _send_message(self, msg): + msg = asbytes(msg) + self._conn.send(struct.pack(">I", len(msg)) + msg) + data = self._read_all(4) + msg = Message(self._read_all(struct.unpack(">I", data)[0])) + return ord(msg.get_byte()), msg + + def _read_all(self, wanted): + result = self._conn.recv(wanted) + while len(result) < wanted: + if len(result) == 0: + raise SSHException("lost ssh-agent") + extra = self._conn.recv(wanted - len(result)) + if len(extra) == 0: + raise SSHException("lost ssh-agent") + result += extra + return result + + +class AgentProxyThread(threading.Thread): + """ + Class in charge of communication between two channels. + """ + + def __init__(self, agent): + threading.Thread.__init__(self, target=self.run) + self._agent = agent + self._exit = False + + def run(self): + try: + (r, addr) = self.get_connection() + # Found that r should be either + # a socket from the socket library or None + self.__inr = r + # The address should be an IP address as a string? 
or None + self.__addr = addr + self._agent.connect() + if not isinstance(self._agent, int) and ( + self._agent._conn is None + or not hasattr(self._agent._conn, "fileno") + ): + raise AuthenticationException("Unable to connect to SSH agent") + self._communicate() + except: + # XXX Not sure what to do here ... raise or pass ? + raise + + def _communicate(self): + import fcntl + + oldflags = fcntl.fcntl(self.__inr, fcntl.F_GETFL) + fcntl.fcntl(self.__inr, fcntl.F_SETFL, oldflags | os.O_NONBLOCK) + while not self._exit: + events = select([self._agent._conn, self.__inr], [], [], 0.5) + for fd in events[0]: + if self._agent._conn == fd: + data = self._agent._conn.recv(512) + if len(data) != 0: + self.__inr.send(data) + else: + self._close() + break + elif self.__inr == fd: + data = self.__inr.recv(512) + if len(data) != 0: + self._agent._conn.send(data) + else: + self._close() + break + time.sleep(io_sleep) + + def _close(self): + self._exit = True + self.__inr.close() + self._agent._conn.close() + + +class AgentLocalProxy(AgentProxyThread): + """ + Class to be used when wanting to ask a local SSH Agent being + asked from a remote fake agent (so use a unix socket for ex.) + """ + + def __init__(self, agent): + AgentProxyThread.__init__(self, agent) + + def get_connection(self): + """ + Return a pair of socket object and string address. + + May block! + """ + conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + conn.bind(self._agent._get_filename()) + conn.listen(1) + (r, addr) = conn.accept() + return r, addr + except: + raise + + +class AgentRemoteProxy(AgentProxyThread): + """ + Class to be used when wanting to ask a remote SSH Agent + """ + + def __init__(self, agent, chan): + AgentProxyThread.__init__(self, agent) + self.__chan = chan + + def get_connection(self): + return self.__chan, None + + +def get_agent_connection(): + """ + Returns some SSH agent object, or None if none were found/supported. + + .. versionadded:: 2.10 + """ + if ("SSH_AUTH_SOCK" in os.environ) and (sys.platform != "win32"): + conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + conn.connect(os.environ["SSH_AUTH_SOCK"]) + return conn + except: + # probably a dangling env var: the ssh agent is gone + return + elif sys.platform == "win32": + from . import win_pageant, win_openssh + + conn = None + if win_pageant.can_talk_to_agent(): + conn = win_pageant.PageantConnection() + elif win_openssh.can_talk_to_agent(): + conn = win_openssh.OpenSSHAgentConnection() + return conn + else: + # no agent support + return + + +class AgentClientProxy: + """ + Class proxying request as a client: + + #. client ask for a request_forward_agent() + #. server creates a proxy and a fake SSH Agent + #. server ask for establishing a connection when needed, + calling the forward_agent_handler at client side. + #. the forward_agent_handler launch a thread for connecting + the remote fake agent and the local agent + #. Communication occurs ... + """ + + def __init__(self, chanRemote): + self._conn = None + self.__chanR = chanRemote + self.thread = AgentRemoteProxy(self, chanRemote) + self.thread.start() + + def __del__(self): + self.close() + + def connect(self): + """ + Method automatically called by ``AgentProxyThread.run``. 
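        Obtains a connection to the local SSH agent via
        ``get_agent_connection`` and stores it on ``self._conn``; if no agent
        is reachable, the method simply returns.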
+ """ + conn = get_agent_connection() + if not conn: + return + self._conn = conn + + def close(self): + """ + Close the current connection and terminate the agent + Should be called manually + """ + if hasattr(self, "thread"): + self.thread._exit = True + self.thread.join(1000) + if self._conn is not None: + self._conn.close() + + +class AgentServerProxy(AgentSSH): + """ + Allows an SSH server to access a forwarded agent. + + This also creates a unix domain socket on the system to allow external + programs to also access the agent. For this reason, you probably only want + to create one of these. + + :meth:`connect` must be called before it is usable. This will also load the + list of keys the agent contains. You must also call :meth:`close` in + order to clean up the unix socket and the thread that maintains it. + (:class:`contextlib.closing` might be helpful to you.) + + :param .Transport t: Transport used for SSH Agent communication forwarding + + :raises: `.SSHException` -- mostly if we lost the agent + """ + + def __init__(self, t): + AgentSSH.__init__(self) + self.__t = t + self._dir = tempfile.mkdtemp("sshproxy") + os.chmod(self._dir, stat.S_IRWXU) + self._file = self._dir + "/sshproxy.ssh" + self.thread = AgentLocalProxy(self) + self.thread.start() + + def __del__(self): + self.close() + + def connect(self): + conn_sock = self.__t.open_forward_agent_channel() + if conn_sock is None: + raise SSHException("lost ssh-agent") + conn_sock.set_name("auth-agent") + self._connect(conn_sock) + + def close(self): + """ + Terminate the agent, clean the files, close connections + Should be called manually + """ + os.remove(self._file) + os.rmdir(self._dir) + self.thread._exit = True + self.thread.join(1000) + self._close() + + def get_env(self): + """ + Helper for the environment under unix + + :return: + a dict containing the ``SSH_AUTH_SOCK`` environment variables + """ + return {"SSH_AUTH_SOCK": self._get_filename()} + + def _get_filename(self): + return self._file + + +class AgentRequestHandler: + """ + Primary/default implementation of SSH agent forwarding functionality. + + Simply instantiate this class, handing it a live command-executing session + object, and it will handle forwarding any local SSH agent processes it + finds. + + For example:: + + # Connect + client = SSHClient() + client.connect(host, port, username) + # Obtain session + session = client.get_transport().open_session() + # Forward local agent + AgentRequestHandler(session) + # Commands executed after this point will see the forwarded agent on + # the remote end. + session.exec_command("git clone https://my.git.repository/") + """ + + def __init__(self, chanClient): + self._conn = None + self.__chanC = chanClient + chanClient.request_forward_agent(self._forward_agent_handler) + self.__clientProxys = [] + + def _forward_agent_handler(self, chanRemote): + self.__clientProxys.append(AgentClientProxy(chanRemote)) + + def __del__(self): + self.close() + + def close(self): + for p in self.__clientProxys: + p.close() + + +class Agent(AgentSSH): + """ + Client interface for using private keys from an SSH agent running on the + local machine. If an SSH agent is running, this class can be used to + connect to it and retrieve `.PKey` objects which can be used when + attempting to authenticate to remote SSH servers. + + Upon initialization, a session with the local machine's SSH agent is + opened, if one is running. If no agent is running, initialization will + succeed, but `get_keys` will return an empty tuple. 
+ + :raises: `.SSHException` -- + if an SSH agent is found, but speaks an incompatible protocol + + .. versionchanged:: 2.10 + Added support for native openssh agent on windows (extending previous + putty pageant support) + """ + + def __init__(self): + AgentSSH.__init__(self) + + conn = get_agent_connection() + if not conn: + return + self._connect(conn) + + def close(self): + """ + Close the SSH agent connection. + """ + self._close() + + +class AgentKey(PKey): + """ + Private key held in a local SSH agent. This type of key can be used for + authenticating to a remote server (signing). Most other key operations + work as expected. + + .. versionchanged:: 3.2 + Added the ``comment`` kwarg and attribute. + + .. versionchanged:: 3.2 + Added the ``.inner_key`` attribute holding a reference to the 'real' + key instance this key is a proxy for, if one was obtainable, else None. + """ + + def __init__(self, agent, blob, comment=""): + self.agent = agent + self.blob = blob + self.comment = comment + msg = Message(blob) + self.name = msg.get_text() + self._logger = get_logger(__file__) + self.inner_key = None + try: + self.inner_key = PKey.from_type_string( + key_type=self.name, key_bytes=blob + ) + except UnknownKeyType: + # Log, but don't explode, since inner_key is a best-effort thing. + err = "Unable to derive inner_key for agent key of type {!r}" + self.log(DEBUG, err.format(self.name)) + + def log(self, *args, **kwargs): + return self._logger.log(*args, **kwargs) + + def asbytes(self): + # Prefer inner_key.asbytes, since that will differ for eg RSA-CERT + return self.inner_key.asbytes() if self.inner_key else self.blob + + def get_name(self): + return self.name + + def get_bits(self): + # Have to work around PKey's default get_bits being crap + if self.inner_key is not None: + return self.inner_key.get_bits() + return super().get_bits() + + def __getattr__(self, name): + """ + Proxy any un-implemented methods/properties to the inner_key. + """ + if self.inner_key is None: # nothing to proxy to + raise AttributeError(name) + return getattr(self.inner_key, name) + + @property + def _fields(self): + fallback = [self.get_name(), self.blob] + return self.inner_key._fields if self.inner_key else fallback + + def sign_ssh_data(self, data, algorithm=None): + msg = Message() + msg.add_byte(cSSH2_AGENTC_SIGN_REQUEST) + # NOTE: this used to be just self.blob, which is not entirely right for + # RSA-CERT 'keys' - those end up always degrading to ssh-rsa type + # signatures, for reasons probably internal to OpenSSH's agent code, + # even if everything else wants SHA2 (including our flag map). + msg.add_string(self.asbytes()) + msg.add_string(data) + msg.add_int(ALGORITHM_FLAG_MAP.get(algorithm, 0)) + ptype, result = self.agent._send_message(msg) + if ptype != SSH2_AGENT_SIGN_RESPONSE: + raise SSHException("key cannot be used for signing") + return result.get_binary() diff --git a/.venv/lib/python3.9/site-packages/paramiko/auth_handler.py b/.venv/lib/python3.9/site-packages/paramiko/auth_handler.py new file mode 100644 index 0000000..bc7f298 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/auth_handler.py @@ -0,0 +1,1092 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. 
+# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +`.AuthHandler` +""" + +import weakref +import threading +import time +import re + +from paramiko.common import ( + cMSG_SERVICE_REQUEST, + cMSG_DISCONNECT, + DISCONNECT_SERVICE_NOT_AVAILABLE, + DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE, + cMSG_USERAUTH_REQUEST, + cMSG_SERVICE_ACCEPT, + DEBUG, + AUTH_SUCCESSFUL, + INFO, + cMSG_USERAUTH_SUCCESS, + cMSG_USERAUTH_FAILURE, + AUTH_PARTIALLY_SUCCESSFUL, + cMSG_USERAUTH_INFO_REQUEST, + WARNING, + AUTH_FAILED, + cMSG_USERAUTH_PK_OK, + cMSG_USERAUTH_INFO_RESPONSE, + MSG_SERVICE_REQUEST, + MSG_SERVICE_ACCEPT, + MSG_USERAUTH_REQUEST, + MSG_USERAUTH_SUCCESS, + MSG_USERAUTH_FAILURE, + MSG_USERAUTH_BANNER, + MSG_USERAUTH_INFO_REQUEST, + MSG_USERAUTH_INFO_RESPONSE, + cMSG_USERAUTH_GSSAPI_RESPONSE, + cMSG_USERAUTH_GSSAPI_TOKEN, + cMSG_USERAUTH_GSSAPI_MIC, + MSG_USERAUTH_GSSAPI_RESPONSE, + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_GSSAPI_ERROR, + MSG_USERAUTH_GSSAPI_ERRTOK, + MSG_USERAUTH_GSSAPI_MIC, + MSG_NAMES, + cMSG_USERAUTH_BANNER, +) +from paramiko.message import Message +from paramiko.util import b, u +from paramiko.ssh_exception import ( + SSHException, + AuthenticationException, + BadAuthenticationType, + PartialAuthentication, +) +from paramiko.server import InteractiveQuery +from paramiko.ssh_gss import GSSAuth, GSS_EXCEPTIONS + + +class AuthHandler: + """ + Internal class to handle the mechanics of authentication. 
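    In client mode it builds and sends the userauth requests; in server mode
    it parses and answers them. The ``_handler_table`` property further down
    selects the client or server dispatch table based on
    ``transport.server_mode``.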
+ """ + + def __init__(self, transport): + self.transport = weakref.proxy(transport) + self.username = None + self.authenticated = False + self.auth_event = None + self.auth_method = "" + self.banner = None + self.password = None + self.private_key = None + self.interactive_handler = None + self.submethods = None + # for server mode: + self.auth_username = None + self.auth_fail_count = 0 + # for GSSAPI + self.gss_host = None + self.gss_deleg_creds = True + + def _log(self, *args): + return self.transport._log(*args) + + def is_authenticated(self): + return self.authenticated + + def get_username(self): + if self.transport.server_mode: + return self.auth_username + else: + return self.username + + def auth_none(self, username, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "none" + self.username = username + self._request_auth() + finally: + self.transport.lock.release() + + def auth_publickey(self, username, key, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "publickey" + self.username = username + self.private_key = key + self._request_auth() + finally: + self.transport.lock.release() + + def auth_password(self, username, password, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "password" + self.username = username + self.password = password + self._request_auth() + finally: + self.transport.lock.release() + + def auth_interactive(self, username, handler, event, submethods=""): + """ + response_list = handler(title, instructions, prompt_list) + """ + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "keyboard-interactive" + self.username = username + self.interactive_handler = handler + self.submethods = submethods + self._request_auth() + finally: + self.transport.lock.release() + + def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "gssapi-with-mic" + self.username = username + self.gss_host = gss_host + self.gss_deleg_creds = gss_deleg_creds + self._request_auth() + finally: + self.transport.lock.release() + + def auth_gssapi_keyex(self, username, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "gssapi-keyex" + self.username = username + self._request_auth() + finally: + self.transport.lock.release() + + def abort(self): + if self.auth_event is not None: + self.auth_event.set() + + # ...internals... + + def _request_auth(self): + m = Message() + m.add_byte(cMSG_SERVICE_REQUEST) + m.add_string("ssh-userauth") + self.transport._send_message(m) + + def _disconnect_service_not_available(self): + m = Message() + m.add_byte(cMSG_DISCONNECT) + m.add_int(DISCONNECT_SERVICE_NOT_AVAILABLE) + m.add_string("Service not available") + m.add_string("en") + self.transport._send_message(m) + self.transport.close() + + def _disconnect_no_more_auth(self): + m = Message() + m.add_byte(cMSG_DISCONNECT) + m.add_int(DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE) + m.add_string("No more auth methods available") + m.add_string("en") + self.transport._send_message(m) + self.transport.close() + + def _get_key_type_and_bits(self, key): + """ + Given any key, return its type/algorithm & bits-to-sign. + + Intended for input to or verification of, key signatures. 
+ """ + # Use certificate contents, if available, plain pubkey otherwise + if key.public_blob: + return key.public_blob.key_type, key.public_blob.key_blob + else: + return key.get_name(), key + + def _get_session_blob(self, key, service, username, algorithm): + m = Message() + m.add_string(self.transport.session_id) + m.add_byte(cMSG_USERAUTH_REQUEST) + m.add_string(username) + m.add_string(service) + m.add_string("publickey") + m.add_boolean(True) + _, bits = self._get_key_type_and_bits(key) + m.add_string(algorithm) + m.add_string(bits) + return m.asbytes() + + def wait_for_response(self, event): + max_ts = None + if self.transport.auth_timeout is not None: + max_ts = time.time() + self.transport.auth_timeout + while True: + event.wait(0.1) + if not self.transport.is_active(): + e = self.transport.get_exception() + if (e is None) or issubclass(e.__class__, EOFError): + e = AuthenticationException( + "Authentication failed: transport shut down or saw EOF" + ) + raise e + if event.is_set(): + break + if max_ts is not None and max_ts <= time.time(): + raise AuthenticationException("Authentication timeout.") + + if not self.is_authenticated(): + e = self.transport.get_exception() + if e is None: + e = AuthenticationException("Authentication failed.") + # this is horrible. Python Exception isn't yet descended from + # object, so type(e) won't work. :( + # TODO 4.0: lol. just lmao. + if issubclass(e.__class__, PartialAuthentication): + return e.allowed_types + raise e + return [] + + def _parse_service_request(self, m): + service = m.get_text() + if self.transport.server_mode and (service == "ssh-userauth"): + # accepted + m = Message() + m.add_byte(cMSG_SERVICE_ACCEPT) + m.add_string(service) + self.transport._send_message(m) + banner, language = self.transport.server_object.get_banner() + if banner: + m = Message() + m.add_byte(cMSG_USERAUTH_BANNER) + m.add_string(banner) + m.add_string(language) + self.transport._send_message(m) + return + # dunno this one + self._disconnect_service_not_available() + + def _generate_key_from_request(self, algorithm, keyblob): + # For use in server mode. + options = self.transport.preferred_pubkeys + if algorithm.replace("-cert-v01@openssh.com", "") not in options: + err = ( + "Auth rejected: pubkey algorithm '{}' unsupported or disabled" + ) + self._log(INFO, err.format(algorithm)) + return None + return self.transport._key_info[algorithm](Message(keyblob)) + + def _choose_fallback_pubkey_algorithm(self, key_type, my_algos): + # Fallback: first one in our (possibly tweaked by caller) list + pubkey_algo = my_algos[0] + msg = "Server did not send a server-sig-algs list; defaulting to our first preferred algo ({!r})" # noqa + self._log(DEBUG, msg.format(pubkey_algo)) + self._log( + DEBUG, + "NOTE: you may use the 'disabled_algorithms' SSHClient/Transport init kwarg to disable that or other algorithms if your server does not support them!", # noqa + ) + return pubkey_algo + + def _finalize_pubkey_algorithm(self, key_type): + # Short-circuit for non-RSA keys + if "rsa" not in key_type: + return key_type + self._log( + DEBUG, + "Finalizing pubkey algorithm for key of type {!r}".format( + key_type + ), + ) + # NOTE re #2017: When the key is an RSA cert and the remote server is + # OpenSSH 7.7 or earlier, always use ssh-rsa-cert-v01@openssh.com. 
+ # Those versions of the server won't support rsa-sha2 family sig algos + # for certs specifically, and in tandem with various server bugs + # regarding server-sig-algs, it's impossible to fit this into the rest + # of the logic here. + if key_type.endswith("-cert-v01@openssh.com") and re.search( + r"-OpenSSH_(?:[1-6]|7\.[0-7])", self.transport.remote_version + ): + pubkey_algo = "ssh-rsa-cert-v01@openssh.com" + self.transport._agreed_pubkey_algorithm = pubkey_algo + self._log(DEBUG, "OpenSSH<7.8 + RSA cert = forcing ssh-rsa!") + self._log( + DEBUG, "Agreed upon {!r} pubkey algorithm".format(pubkey_algo) + ) + return pubkey_algo + # Normal attempts to handshake follow from here. + # Only consider RSA algos from our list, lest we agree on another! + my_algos = [x for x in self.transport.preferred_pubkeys if "rsa" in x] + self._log(DEBUG, "Our pubkey algorithm list: {}".format(my_algos)) + # Short-circuit negatively if user disabled all RSA algos (heh) + if not my_algos: + raise SSHException( + "An RSA key was specified, but no RSA pubkey algorithms are configured!" # noqa + ) + # Check for server-sig-algs if supported & sent + server_algo_str = u( + self.transport.server_extensions.get("server-sig-algs", b("")) + ) + pubkey_algo = None + # Prefer to match against server-sig-algs + if server_algo_str: + server_algos = server_algo_str.split(",") + self._log( + DEBUG, "Server-side algorithm list: {}".format(server_algos) + ) + # Only use algos from our list that the server likes, in our own + # preference order. (NOTE: purposefully using same style as in + # Transport...expect to refactor later) + agreement = list(filter(server_algos.__contains__, my_algos)) + if agreement: + pubkey_algo = agreement[0] + self._log( + DEBUG, + "Agreed upon {!r} pubkey algorithm".format(pubkey_algo), + ) + else: + self._log(DEBUG, "No common pubkey algorithms exist! Dying.") + # TODO: MAY want to use IncompatiblePeer again here but that's + # technically for initial key exchange, not pubkey auth. + err = "Unable to agree on a pubkey algorithm for signing a {!r} key!" 
# noqa + raise AuthenticationException(err.format(key_type)) + # Fallback to something based purely on the key & our configuration + else: + pubkey_algo = self._choose_fallback_pubkey_algorithm( + key_type, my_algos + ) + if key_type.endswith("-cert-v01@openssh.com"): + pubkey_algo += "-cert-v01@openssh.com" + self.transport._agreed_pubkey_algorithm = pubkey_algo + return pubkey_algo + + def _parse_service_accept(self, m): + service = m.get_text() + if service == "ssh-userauth": + self._log(DEBUG, "userauth is OK") + m = Message() + m.add_byte(cMSG_USERAUTH_REQUEST) + m.add_string(self.username) + m.add_string("ssh-connection") + m.add_string(self.auth_method) + if self.auth_method == "password": + m.add_boolean(False) + password = b(self.password) + m.add_string(password) + elif self.auth_method == "publickey": + m.add_boolean(True) + key_type, bits = self._get_key_type_and_bits(self.private_key) + algorithm = self._finalize_pubkey_algorithm(key_type) + m.add_string(algorithm) + m.add_string(bits) + blob = self._get_session_blob( + self.private_key, + "ssh-connection", + self.username, + algorithm, + ) + sig = self.private_key.sign_ssh_data(blob, algorithm) + m.add_string(sig) + elif self.auth_method == "keyboard-interactive": + m.add_string("") + m.add_string(self.submethods) + elif self.auth_method == "gssapi-with-mic": + sshgss = GSSAuth(self.auth_method, self.gss_deleg_creds) + m.add_bytes(sshgss.ssh_gss_oids()) + # send the supported GSSAPI OIDs to the server + self.transport._send_message(m) + ptype, m = self.transport.packetizer.read_message() + if ptype == MSG_USERAUTH_BANNER: + self._parse_userauth_banner(m) + ptype, m = self.transport.packetizer.read_message() + if ptype == MSG_USERAUTH_GSSAPI_RESPONSE: + # Read the mechanism selected by the server. We send just + # the Kerberos V5 OID, so the server can only respond with + # this OID. + mech = m.get_string() + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) + try: + m.add_string( + sshgss.ssh_init_sec_context( + self.gss_host, mech, self.username + ) + ) + except GSS_EXCEPTIONS as e: + return self._handle_local_gss_failure(e) + self.transport._send_message(m) + while True: + ptype, m = self.transport.packetizer.read_message() + if ptype == MSG_USERAUTH_GSSAPI_TOKEN: + srv_token = m.get_string() + try: + next_token = sshgss.ssh_init_sec_context( + self.gss_host, + mech, + self.username, + srv_token, + ) + except GSS_EXCEPTIONS as e: + return self._handle_local_gss_failure(e) + # After this step the GSSAPI should not return any + # token. If it does, we keep sending the token to + # the server until no more token is returned. + if next_token is None: + break + else: + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) + m.add_string(next_token) + self.transport.send_message(m) + else: + raise SSHException( + "Received Package: {}".format(MSG_NAMES[ptype]) + ) + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_MIC) + # send the MIC to the server + m.add_string(sshgss.ssh_get_mic(self.transport.session_id)) + elif ptype == MSG_USERAUTH_GSSAPI_ERRTOK: + # RFC 4462 says we are not required to implement GSS-API + # error messages. 
+ # See RFC 4462 Section 3.8 in + # http://www.ietf.org/rfc/rfc4462.txt + raise SSHException("Server returned an error token") + elif ptype == MSG_USERAUTH_GSSAPI_ERROR: + maj_status = m.get_int() + min_status = m.get_int() + err_msg = m.get_string() + m.get_string() # Lang tag - discarded + raise SSHException( + """GSS-API Error: +Major Status: {} +Minor Status: {} +Error Message: {} +""".format( + maj_status, min_status, err_msg + ) + ) + elif ptype == MSG_USERAUTH_FAILURE: + self._parse_userauth_failure(m) + return + else: + raise SSHException( + "Received Package: {}".format(MSG_NAMES[ptype]) + ) + elif ( + self.auth_method == "gssapi-keyex" + and self.transport.gss_kex_used + ): + kexgss = self.transport.kexgss_ctxt + kexgss.set_username(self.username) + mic_token = kexgss.ssh_get_mic(self.transport.session_id) + m.add_string(mic_token) + elif self.auth_method == "none": + pass + else: + raise SSHException( + 'Unknown auth method "{}"'.format(self.auth_method) + ) + self.transport._send_message(m) + else: + self._log( + DEBUG, 'Service request "{}" accepted (?)'.format(service) + ) + + def _send_auth_result(self, username, method, result): + # okay, send result + m = Message() + if result == AUTH_SUCCESSFUL: + self._log(INFO, "Auth granted ({}).".format(method)) + m.add_byte(cMSG_USERAUTH_SUCCESS) + self.authenticated = True + else: + self._log(INFO, "Auth rejected ({}).".format(method)) + m.add_byte(cMSG_USERAUTH_FAILURE) + m.add_string( + self.transport.server_object.get_allowed_auths(username) + ) + if result == AUTH_PARTIALLY_SUCCESSFUL: + m.add_boolean(True) + else: + m.add_boolean(False) + self.auth_fail_count += 1 + self.transport._send_message(m) + if self.auth_fail_count >= 10: + self._disconnect_no_more_auth() + if result == AUTH_SUCCESSFUL: + self.transport._auth_trigger() + + def _interactive_query(self, q): + # make interactive query instead of response + m = Message() + m.add_byte(cMSG_USERAUTH_INFO_REQUEST) + m.add_string(q.name) + m.add_string(q.instructions) + m.add_string(bytes()) + m.add_int(len(q.prompts)) + for p in q.prompts: + m.add_string(p[0]) + m.add_boolean(p[1]) + self.transport._send_message(m) + + def _parse_userauth_request(self, m): + if not self.transport.server_mode: + # er, uh... what? + m = Message() + m.add_byte(cMSG_USERAUTH_FAILURE) + m.add_string("none") + m.add_boolean(False) + self.transport._send_message(m) + return + if self.authenticated: + # ignore + return + username = m.get_text() + service = m.get_text() + method = m.get_text() + self._log( + DEBUG, + "Auth request (type={}) service={}, username={}".format( + method, service, username + ), + ) + if service != "ssh-connection": + self._disconnect_service_not_available() + return + if (self.auth_username is not None) and ( + self.auth_username != username + ): + self._log( + WARNING, + "Auth rejected because the client attempted to change username in mid-flight", # noqa + ) + self._disconnect_no_more_auth() + return + self.auth_username = username + # check if GSS-API authentication is enabled + gss_auth = self.transport.server_object.enable_auth_gssapi() + + if method == "none": + result = self.transport.server_object.check_auth_none(username) + elif method == "password": + changereq = m.get_boolean() + password = m.get_binary() + try: + password = password.decode("UTF-8") + except UnicodeError: + # some clients/servers expect non-utf-8 passwords! + # in this case, just return the raw byte string. 
+ pass + if changereq: + # always treated as failure, since we don't support changing + # passwords, but collect the list of valid auth types from + # the callback anyway + self._log(DEBUG, "Auth request to change passwords (rejected)") + newpassword = m.get_binary() + try: + newpassword = newpassword.decode("UTF-8", "replace") + except UnicodeError: + pass + result = AUTH_FAILED + else: + result = self.transport.server_object.check_auth_password( + username, password + ) + elif method == "publickey": + sig_attached = m.get_boolean() + # NOTE: server never wants to guess a client's algo, they're + # telling us directly. No need for _finalize_pubkey_algorithm + # anywhere in this flow. + algorithm = m.get_text() + keyblob = m.get_binary() + try: + key = self._generate_key_from_request(algorithm, keyblob) + except SSHException as e: + self._log(INFO, "Auth rejected: public key: {}".format(str(e))) + key = None + except Exception as e: + msg = "Auth rejected: unsupported or mangled public key ({}: {})" # noqa + self._log(INFO, msg.format(e.__class__.__name__, e)) + key = None + if key is None: + self._disconnect_no_more_auth() + return + # first check if this key is okay... if not, we can skip the verify + result = self.transport.server_object.check_auth_publickey( + username, key + ) + if result != AUTH_FAILED: + # key is okay, verify it + if not sig_attached: + # client wants to know if this key is acceptable, before it + # signs anything... send special "ok" message + m = Message() + m.add_byte(cMSG_USERAUTH_PK_OK) + m.add_string(algorithm) + m.add_string(keyblob) + self.transport._send_message(m) + return + sig = Message(m.get_binary()) + blob = self._get_session_blob( + key, service, username, algorithm + ) + if not key.verify_ssh_sig(blob, sig): + self._log(INFO, "Auth rejected: invalid signature") + result = AUTH_FAILED + elif method == "keyboard-interactive": + submethods = m.get_string() + result = self.transport.server_object.check_auth_interactive( + username, submethods + ) + if isinstance(result, InteractiveQuery): + # make interactive query instead of response + self._interactive_query(result) + return + elif method == "gssapi-with-mic" and gss_auth: + sshgss = GSSAuth(method) + # Read the number of OID mechanisms supported by the client. + # OpenSSH sends just one OID. It's the Kerveros V5 OID and that's + # the only OID we support. + mechs = m.get_int() + # We can't accept more than one OID, so if the SSH client sends + # more than one, disconnect. + if mechs > 1: + self._log( + INFO, + "Disconnect: Received more than one GSS-API OID mechanism", + ) + self._disconnect_no_more_auth() + desired_mech = m.get_string() + mech_ok = sshgss.ssh_check_mech(desired_mech) + # if we don't support the mechanism, disconnect. + if not mech_ok: + self._log( + INFO, + "Disconnect: Received an invalid GSS-API OID mechanism", + ) + self._disconnect_no_more_auth() + # send the Kerberos V5 GSSAPI OID to the client + supported_mech = sshgss.ssh_gss_oids("server") + # RFC 4462 says we are not required to implement GSS-API error + # messages. 
See section 3.8 in http://www.ietf.org/rfc/rfc4462.txt + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_RESPONSE) + m.add_bytes(supported_mech) + self.transport.auth_handler = GssapiWithMicAuthHandler( + self, sshgss + ) + self.transport._expected_packet = ( + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_REQUEST, + MSG_SERVICE_REQUEST, + ) + self.transport._send_message(m) + return + elif method == "gssapi-keyex" and gss_auth: + mic_token = m.get_string() + sshgss = self.transport.kexgss_ctxt + if sshgss is None: + # If there is no valid context, we reject the authentication + result = AUTH_FAILED + self._send_auth_result(username, method, result) + try: + sshgss.ssh_check_mic( + mic_token, self.transport.session_id, self.auth_username + ) + except Exception: + result = AUTH_FAILED + self._send_auth_result(username, method, result) + raise + result = AUTH_SUCCESSFUL + self.transport.server_object.check_auth_gssapi_keyex( + username, result + ) + else: + result = self.transport.server_object.check_auth_none(username) + # okay, send result + self._send_auth_result(username, method, result) + + def _parse_userauth_success(self, m): + self._log( + INFO, "Authentication ({}) successful!".format(self.auth_method) + ) + self.authenticated = True + self.transport._auth_trigger() + if self.auth_event is not None: + self.auth_event.set() + + def _parse_userauth_failure(self, m): + authlist = m.get_list() + # TODO 4.0: we aren't giving callers access to authlist _unless_ it's + # partial authentication, so eg authtype=none can't work unless we + # tweak this. + partial = m.get_boolean() + if partial: + self._log(INFO, "Authentication continues...") + self._log(DEBUG, "Methods: " + str(authlist)) + self.transport.saved_exception = PartialAuthentication(authlist) + elif self.auth_method not in authlist: + for msg in ( + "Authentication type ({}) not permitted.".format( + self.auth_method + ), + "Allowed methods: {}".format(authlist), + ): + self._log(DEBUG, msg) + self.transport.saved_exception = BadAuthenticationType( + "Bad authentication type", authlist + ) + else: + self._log( + INFO, "Authentication ({}) failed.".format(self.auth_method) + ) + self.authenticated = False + self.username = None + if self.auth_event is not None: + self.auth_event.set() + + def _parse_userauth_banner(self, m): + banner = m.get_string() + self.banner = banner + self._log(INFO, "Auth banner: {}".format(banner)) + # who cares. 
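    # Editorial aside (not paramiko source): a hedged sketch of a client-side
    # keyboard-interactive callback compatible with the handler invoked by
    # _parse_userauth_info_request below. prompt_list items are
    # (prompt_text, echo) pairs and the return value is a list of response
    # strings. The function name and use of input/getpass are illustrative
    # assumptions, kept as a comment so it does not alter this class.
    #
    #     import getpass
    #
    #     def demo_interactive_handler(title, instructions, prompt_list):
    #         if title:
    #             print(title)
    #         if instructions:
    #             print(instructions)
    #         return [
    #             input(prompt) if echo else getpass.getpass(prompt)
    #             for prompt, echo in prompt_list
    #         ]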
+ + def _parse_userauth_info_request(self, m): + if self.auth_method != "keyboard-interactive": + raise SSHException("Illegal info request from server") + title = m.get_text() + instructions = m.get_text() + m.get_binary() # lang + prompts = m.get_int() + prompt_list = [] + for i in range(prompts): + prompt_list.append((m.get_text(), m.get_boolean())) + response_list = self.interactive_handler( + title, instructions, prompt_list + ) + + m = Message() + m.add_byte(cMSG_USERAUTH_INFO_RESPONSE) + m.add_int(len(response_list)) + for r in response_list: + m.add_string(r) + self.transport._send_message(m) + + def _parse_userauth_info_response(self, m): + if not self.transport.server_mode: + raise SSHException("Illegal info response from server") + n = m.get_int() + responses = [] + for i in range(n): + responses.append(m.get_text()) + result = self.transport.server_object.check_auth_interactive_response( + responses + ) + if isinstance(result, InteractiveQuery): + # make interactive query instead of response + self._interactive_query(result) + return + self._send_auth_result( + self.auth_username, "keyboard-interactive", result + ) + + def _handle_local_gss_failure(self, e): + self.transport.saved_exception = e + self._log(DEBUG, "GSSAPI failure: {}".format(e)) + self._log(INFO, "Authentication ({}) failed.".format(self.auth_method)) + self.authenticated = False + self.username = None + if self.auth_event is not None: + self.auth_event.set() + return + + # TODO 4.0: MAY make sense to make these tables into actual + # classes/instances that can be fed a mode bool or whatever. Or, + # alternately (both?) make the message types small classes or enums that + # embed this info within themselves (which could also then tidy up the + # current 'integer -> human readable short string' stuff in common.py). + # TODO: if we do that, also expose 'em publicly. + + # Messages which should be handled _by_ servers (sent by clients) + @property + def _server_handler_table(self): + return { + # TODO 4.0: MSG_SERVICE_REQUEST ought to eventually move into + # Transport's server mode like the client side did, just for + # consistency. + MSG_SERVICE_REQUEST: self._parse_service_request, + MSG_USERAUTH_REQUEST: self._parse_userauth_request, + MSG_USERAUTH_INFO_RESPONSE: self._parse_userauth_info_response, + } + + # Messages which should be handled _by_ clients (sent by servers) + @property + def _client_handler_table(self): + return { + MSG_SERVICE_ACCEPT: self._parse_service_accept, + MSG_USERAUTH_SUCCESS: self._parse_userauth_success, + MSG_USERAUTH_FAILURE: self._parse_userauth_failure, + MSG_USERAUTH_BANNER: self._parse_userauth_banner, + MSG_USERAUTH_INFO_REQUEST: self._parse_userauth_info_request, + } + + # NOTE: prior to the fix for #1283, this was a static dict instead of a + # property. Should be backwards compatible in most/all cases. + @property + def _handler_table(self): + if self.transport.server_mode: + return self._server_handler_table + else: + return self._client_handler_table + + +class GssapiWithMicAuthHandler: + """A specialized Auth handler for gssapi-with-mic + + During the GSSAPI token exchange we need a modified dispatch table, + because the packet type numbers are not unique. 
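    The transport temporarily installs an instance of this class as its
    ``auth_handler``; once the token exchange finishes (or fails), the
    original delegate handler is restored via
    ``_restore_delegate_auth_handler``.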
+ """ + + method = "gssapi-with-mic" + + def __init__(self, delegate, sshgss): + self._delegate = delegate + self.sshgss = sshgss + + def abort(self): + self._restore_delegate_auth_handler() + return self._delegate.abort() + + @property + def transport(self): + return self._delegate.transport + + @property + def _send_auth_result(self): + return self._delegate._send_auth_result + + @property + def auth_username(self): + return self._delegate.auth_username + + @property + def gss_host(self): + return self._delegate.gss_host + + def _restore_delegate_auth_handler(self): + self.transport.auth_handler = self._delegate + + def _parse_userauth_gssapi_token(self, m): + client_token = m.get_string() + # use the client token as input to establish a secure + # context. + sshgss = self.sshgss + try: + token = sshgss.ssh_accept_sec_context( + self.gss_host, client_token, self.auth_username + ) + except Exception as e: + self.transport.saved_exception = e + result = AUTH_FAILED + self._restore_delegate_auth_handler() + self._send_auth_result(self.auth_username, self.method, result) + raise + if token is not None: + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) + m.add_string(token) + self.transport._expected_packet = ( + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_GSSAPI_MIC, + MSG_USERAUTH_REQUEST, + ) + self.transport._send_message(m) + + def _parse_userauth_gssapi_mic(self, m): + mic_token = m.get_string() + sshgss = self.sshgss + username = self.auth_username + self._restore_delegate_auth_handler() + try: + sshgss.ssh_check_mic( + mic_token, self.transport.session_id, username + ) + except Exception as e: + self.transport.saved_exception = e + result = AUTH_FAILED + self._send_auth_result(username, self.method, result) + raise + # TODO: Implement client credential saving. + # The OpenSSH server is able to create a TGT with the delegated + # client credentials, but this is not supported by GSS-API. + result = AUTH_SUCCESSFUL + self.transport.server_object.check_auth_gssapi_with_mic( + username, result + ) + # okay, send result + self._send_auth_result(username, self.method, result) + + def _parse_service_request(self, m): + self._restore_delegate_auth_handler() + return self._delegate._parse_service_request(m) + + def _parse_userauth_request(self, m): + self._restore_delegate_auth_handler() + return self._delegate._parse_userauth_request(m) + + __handler_table = { + MSG_SERVICE_REQUEST: _parse_service_request, + MSG_USERAUTH_REQUEST: _parse_userauth_request, + MSG_USERAUTH_GSSAPI_TOKEN: _parse_userauth_gssapi_token, + MSG_USERAUTH_GSSAPI_MIC: _parse_userauth_gssapi_mic, + } + + @property + def _handler_table(self): + # TODO: determine if we can cut this up like we did for the primary + # AuthHandler class. + return self.__handler_table + + +class AuthOnlyHandler(AuthHandler): + """ + AuthHandler, and just auth, no service requests! + + .. versionadded:: 3.2 + """ + + # NOTE: this purposefully duplicates some of the parent class in order to + # modernize, refactor, etc. The intent is that eventually we will collapse + # this one onto the parent in a backwards incompatible release. + + @property + def _client_handler_table(self): + my_table = super()._client_handler_table.copy() + del my_table[MSG_SERVICE_ACCEPT] + return my_table + + def send_auth_request(self, username, method, finish_message=None): + """ + Submit a userauth request message & wait for response. 
+ + Performs the transport message send call, sets self.auth_event, and + will lock-n-block as necessary to both send, and wait for response to, + the USERAUTH_REQUEST. + + Most callers will want to supply a callback to ``finish_message``, + which accepts a Message ``m`` and may call mutator methods on it to add + more fields. + """ + # Store a few things for reference in handlers, including auth failure + # handler (which needs to know if we were using a bad method, etc) + self.auth_method = method + self.username = username + # Generic userauth request fields + m = Message() + m.add_byte(cMSG_USERAUTH_REQUEST) + m.add_string(username) + m.add_string("ssh-connection") + m.add_string(method) + # Caller usually has more to say, such as injecting password, key etc + finish_message(m) + # TODO 4.0: seems odd to have the client handle the lock and not + # Transport; that _may_ have been an artifact of allowing user + # threading event injection? Regardless, we don't want to move _this_ + # locking into Transport._send_message now, because lots of other + # untouched code also uses that method and we might end up + # double-locking (?) but 4.0 would be a good time to revisit. + with self.transport.lock: + self.transport._send_message(m) + # We have cut out the higher level event args, but self.auth_event is + # still required for self.wait_for_response to function correctly (it's + # the mechanism used by the auth success/failure handlers, the abort + # handler, and a few other spots like in gssapi. + # TODO: interestingly, wait_for_response itself doesn't actually + # enforce that its event argument and self.auth_event are the same... + self.auth_event = threading.Event() + return self.wait_for_response(self.auth_event) + + def auth_none(self, username): + return self.send_auth_request(username, "none") + + def auth_publickey(self, username, key): + key_type, bits = self._get_key_type_and_bits(key) + algorithm = self._finalize_pubkey_algorithm(key_type) + blob = self._get_session_blob( + key, + "ssh-connection", + username, + algorithm, + ) + + def finish(m): + # This field doesn't appear to be named, but is False when querying + # for permission (ie knowing whether to even prompt a user for + # passphrase, etc) or True when just going for it. Paramiko has + # never bothered with the former type of message, apparently. + m.add_boolean(True) + m.add_string(algorithm) + m.add_string(bits) + m.add_string(key.sign_ssh_data(blob, algorithm)) + + return self.send_auth_request(username, "publickey", finish) + + def auth_password(self, username, password): + def finish(m): + # Unnamed field that equates to "I am changing my password", which + # Paramiko clientside never supported and serverside only sort of + # supported. + m.add_boolean(False) + m.add_string(b(password)) + + return self.send_auth_request(username, "password", finish) + + def auth_interactive(self, username, handler, submethods=""): + """ + response_list = handler(title, instructions, prompt_list) + """ + # Unlike most siblings, this auth method _does_ require other + # superclass handlers (eg userauth info request) to understand + # what's going on, so we still set some self attributes. 
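        # For reference (editor's illustration, not upstream code): any
        # callable of this shape satisfies ``handler``; ``prompt_list`` is a
        # list of ``(prompt_text, echo)`` tuples built by
        # _parse_userauth_info_request above, e.g.
        #
        #     def handler(title, instructions, prompt_list):
        #         return [
        #             input(prompt) if echo else getpass.getpass(prompt)
        #             for prompt, echo in prompt_list
        #         ]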
+ self.auth_method = "keyboard_interactive" + self.interactive_handler = handler + + def finish(m): + # Empty string for deprecated language tag field, per RFC 4256: + # https://www.rfc-editor.org/rfc/rfc4256#section-3.1 + m.add_string("") + m.add_string(submethods) + + return self.send_auth_request(username, "keyboard-interactive", finish) + + # NOTE: not strictly 'auth only' related, but allows users to opt-in. + def _choose_fallback_pubkey_algorithm(self, key_type, my_algos): + msg = "Server did not send a server-sig-algs list; defaulting to something in our preferred algorithms list" # noqa + self._log(DEBUG, msg) + noncert_key_type = key_type.replace("-cert-v01@openssh.com", "") + if key_type in my_algos or noncert_key_type in my_algos: + actual = key_type if key_type in my_algos else noncert_key_type + msg = f"Current key type, {actual!r}, is in our preferred list; using that" # noqa + algo = actual + else: + algo = my_algos[0] + msg = f"{key_type!r} not in our list - trying first list item instead, {algo!r}" # noqa + self._log(DEBUG, msg) + return algo diff --git a/.venv/lib/python3.9/site-packages/paramiko/auth_strategy.py b/.venv/lib/python3.9/site-packages/paramiko/auth_strategy.py new file mode 100644 index 0000000..03c1d87 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/auth_strategy.py @@ -0,0 +1,306 @@ +""" +Modern, adaptable authentication machinery. + +Replaces certain parts of `.SSHClient`. For a concrete implementation, see the +``OpenSSHAuthStrategy`` class in `Fabric `_. +""" + +from collections import namedtuple + +from .agent import AgentKey +from .util import get_logger +from .ssh_exception import AuthenticationException + + +class AuthSource: + """ + Some SSH authentication source, such as a password, private key, or agent. + + See subclasses in this module for concrete implementations. + + All implementations must accept at least a ``username`` (``str``) kwarg. + """ + + def __init__(self, username): + self.username = username + + def _repr(self, **kwargs): + # TODO: are there any good libs for this? maybe some helper from + # structlog? + pairs = [f"{k}={v!r}" for k, v in kwargs.items()] + joined = ", ".join(pairs) + return f"{self.__class__.__name__}({joined})" + + def __repr__(self): + return self._repr() + + def authenticate(self, transport): + """ + Perform authentication. + """ + raise NotImplementedError + + +class NoneAuth(AuthSource): + """ + Auth type "none", ie https://www.rfc-editor.org/rfc/rfc4252#section-5.2 . + """ + + def authenticate(self, transport): + return transport.auth_none(self.username) + + +class Password(AuthSource): + """ + Password authentication. + + :param callable password_getter: + A lazy callable that should return a `str` password value at + authentication time, such as a `functools.partial` wrapping + `getpass.getpass`, an API call to a secrets store, or similar. + + If you already know the password at instantiation time, you should + simply use something like ``lambda: "my literal"`` (for a literal, but + also, shame on you!) or ``lambda: variable_name`` (for something stored + in a variable). + """ + + def __init__(self, username, password_getter): + super().__init__(username=username) + self.password_getter = password_getter + + def __repr__(self): + # Password auth is marginally more 'username-caring' than pkeys, so may + # as well log that info here. 
+ return super()._repr(user=self.username) + + def authenticate(self, transport): + # Lazily get the password, in case it's prompting a user + # TODO: be nice to log source _of_ the password? + password = self.password_getter() + return transport.auth_password(self.username, password) + + +# TODO 4.0: twiddle this, or PKey, or both, so they're more obviously distinct. +# TODO 4.0: the obvious is to make this more wordy (PrivateKeyAuth), the +# minimalist approach might be to rename PKey to just Key (esp given all the +# subclasses are WhateverKey and not WhateverPKey) +class PrivateKey(AuthSource): + """ + Essentially a mixin for private keys. + + Knows how to auth, but leaves key material discovery/loading/decryption to + subclasses. + + Subclasses **must** ensure that they've set ``self.pkey`` to a decrypted + `.PKey` instance before calling ``super().authenticate``; typically + either in their ``__init__``, or in an overridden ``authenticate`` prior to + its `super` call. + """ + + def authenticate(self, transport): + return transport.auth_publickey(self.username, self.pkey) + + +class InMemoryPrivateKey(PrivateKey): + """ + An in-memory, decrypted `.PKey` object. + """ + + def __init__(self, username, pkey): + super().__init__(username=username) + # No decryption (presumably) necessary! + self.pkey = pkey + + def __repr__(self): + # NOTE: most of interesting repr-bits for private keys is in PKey. + # TODO: tacking on agent-ness like this is a bit awkward, but, eh? + rep = super()._repr(pkey=self.pkey) + if isinstance(self.pkey, AgentKey): + rep += " [agent]" + return rep + + +class OnDiskPrivateKey(PrivateKey): + """ + Some on-disk private key that needs opening and possibly decrypting. + + :param str source: + String tracking where this key's path was specified; should be one of + ``"ssh-config"``, ``"python-config"``, or ``"implicit-home"``. + :param Path path: + The filesystem path this key was loaded from. + :param PKey pkey: + The `PKey` object this auth source uses/represents. + """ + + def __init__(self, username, source, path, pkey): + super().__init__(username=username) + self.source = source + allowed = ("ssh-config", "python-config", "implicit-home") + if source not in allowed: + raise ValueError(f"source argument must be one of: {allowed!r}") + self.path = path + # Superclass wants .pkey, other two are mostly for display/debugging. + self.pkey = pkey + + def __repr__(self): + return self._repr( + key=self.pkey, source=self.source, path=str(self.path) + ) + + +# TODO re sources: is there anything in an OpenSSH config file that doesn't fit +# into what Paramiko already had kwargs for? + + +SourceResult = namedtuple("SourceResult", ["source", "result"]) + +# TODO: tempting to make this an OrderedDict, except the keys essentially want +# to be rich objects (AuthSources) which do not make for useful user indexing? +# TODO: members being vanilla tuples is pretty old-school/expedient; they +# "really" want to be something that's type friendlier (unless the tuple's 2nd +# member being a Union of two types is "fine"?), which I assume means yet more +# classes, eg an abstract SourceResult with concrete AuthSuccess and +# AuthFailure children? +# TODO: arguably we want __init__ typechecking of the members (or to leverage +# mypy by classifying this literally as list-of-AuthSource?) +class AuthResult(list): + """ + Represents a partial or complete SSH authentication attempt. 
+ + This class conceptually extends `AuthStrategy` by pairing the former's + authentication **sources** with the **results** of trying to authenticate + with them. + + `AuthResult` is a (subclass of) `list` of `namedtuple`, which are of the + form ``namedtuple('SourceResult', 'source', 'result')`` (where the + ``source`` member is an `AuthSource` and the ``result`` member is either a + return value from the relevant `.Transport` method, or an exception + object). + + .. note:: + Transport auth method results are always themselves a ``list`` of "next + allowable authentication methods". + + In the simple case of "you just authenticated successfully", it's an + empty list; if your auth was rejected but you're allowed to try again, + it will be a list of string method names like ``pubkey`` or + ``password``. + + The ``__str__`` of this class represents the empty-list scenario as the + word ``success``, which should make reading the result of an + authentication session more obvious to humans. + + Instances also have a `strategy` attribute referencing the `AuthStrategy` + which was attempted. + """ + + def __init__(self, strategy, *args, **kwargs): + self.strategy = strategy + super().__init__(*args, **kwargs) + + def __str__(self): + # NOTE: meaningfully distinct from __repr__, which still wants to use + # superclass' implementation. + # TODO: go hog wild, use rich.Table? how is that on degraded term's? + # TODO: test this lol + return "\n".join( + f"{x.source} -> {x.result or 'success'}" for x in self + ) + + +# TODO 4.0: descend from SSHException or even just Exception +class AuthFailure(AuthenticationException): + """ + Basic exception wrapping an `AuthResult` indicating overall auth failure. + + Note that `AuthFailure` descends from `AuthenticationException` but is + generally "higher level"; the latter is now only raised by individual + `AuthSource` attempts and should typically only be seen by users when + encapsulated in this class. It subclasses `AuthenticationException` + primarily for backwards compatibility reasons. + """ + + def __init__(self, result): + self.result = result + + def __str__(self): + return "\n" + str(self.result) + + +class AuthStrategy: + """ + This class represents one or more attempts to auth with an SSH server. + + By default, subclasses must at least accept an ``ssh_config`` + (`.SSHConfig`) keyword argument, but may opt to accept more as needed for + their particular strategy. + """ + + def __init__( + self, + ssh_config, + ): + self.ssh_config = ssh_config + self.log = get_logger(__name__) + + def get_sources(self): + """ + Generator yielding `AuthSource` instances, in the order to try. + + This is the primary override point for subclasses: you figure out what + sources you need, and ``yield`` them. + + Subclasses _of_ subclasses may find themselves wanting to do things + like filtering or discarding around a call to `super`. + """ + raise NotImplementedError + + def authenticate(self, transport): + """ + Handles attempting `AuthSource` instances yielded from `get_sources`. + + You *normally* won't need to override this, but it's an option for + advanced users. + """ + succeeded = False + overall_result = AuthResult(strategy=self) + # TODO: arguably we could fit in a "send none auth, record allowed auth + # types sent back" thing here as OpenSSH-client does, but that likely + # wants to live in fabric.OpenSSHAuthStrategy as not all target servers + # will implement it! + # TODO: needs better "server told us too many attempts" checking! 
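# [Editor's aside -- illustrative sketch, not part of the paramiko patch.]
# get_sources() above is the intended override point. A minimal strategy that
# tries an in-memory key first and then falls back to a lazily prompted
# password could look like this (KeyThenPassword and its username/pkey
# arguments are invented for the example; ssh_config is passed through only
# to satisfy the base class):

from functools import partial
import getpass

from paramiko.auth_strategy import AuthStrategy, InMemoryPrivateKey, Password


class KeyThenPassword(AuthStrategy):
    def __init__(self, ssh_config, username, pkey):
        super().__init__(ssh_config=ssh_config)
        self.username = username
        self.pkey = pkey

    def get_sources(self):
        yield InMemoryPrivateKey(username=self.username, pkey=self.pkey)
        yield Password(
            username=self.username,
            password_getter=partial(getpass.getpass, "SSH password: "),
        )

# If no source succeeds, authenticate() below raises AuthFailure, whose
# ``result`` attribute is the AuthResult of (source, result) pairs described
# earlier.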
+ for source in self.get_sources(): + self.log.debug(f"Trying {source}") + try: # NOTE: this really wants to _only_ wrap the authenticate()! + result = source.authenticate(transport) + succeeded = True + # TODO: 'except PartialAuthentication' is needed for 2FA and + # similar, as per old SSHClient.connect - it is the only way + # AuthHandler supplies access to the 'name-list' field from + # MSG_USERAUTH_FAILURE, at present. + except Exception as e: + result = e + # TODO: look at what this could possibly raise, we don't really + # want Exception here, right? just SSHException subclasses? or + # do we truly want to capture anything at all with assumption + # it's easy enough for users to look afterwards? + # NOTE: showing type, not message, for tersity & also most of + # the time it's basically just "Authentication failed." + source_class = e.__class__.__name__ + self.log.info( + f"Authentication via {source} failed with {source_class}" + ) + overall_result.append(SourceResult(source, result)) + if succeeded: + break + # Gotta die here if nothing worked, otherwise Transport's main loop + # just kinda hangs out until something times out! + if not succeeded: + raise AuthFailure(result=overall_result) + # Success: give back what was done, in case they care. + return overall_result + + # TODO: is there anything OpenSSH client does which _can't_ cleanly map to + # iterating a generator? diff --git a/.venv/lib/python3.9/site-packages/paramiko/ber.py b/.venv/lib/python3.9/site-packages/paramiko/ber.py new file mode 100644 index 0000000..b8287f5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/ber.py @@ -0,0 +1,139 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +from paramiko.common import max_byte, zero_byte, byte_ord, byte_chr + +import paramiko.util as util +from paramiko.util import b +from paramiko.sftp import int64 + + +class BERException(Exception): + pass + + +class BER: + """ + Robey's tiny little attempt at a BER decoder. 
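
    Editor's illustration (not upstream documentation): integers and nested
    lists round-trip, e.g.::

        from paramiko.ber import BER

        ber = BER()
        ber.encode([1, 2, [3, 4]])      # a SEQUENCE of INTEGERs / SEQUENCEs
        blob = ber.asbytes()
        assert BER(blob).decode() == [1, 2, [3, 4]]

    decode_next only understands the INTEGER (2) and SEQUENCE (0x30)
    identifiers; other identifiers raise BERException.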
+ """ + + def __init__(self, content=bytes()): + self.content = b(content) + self.idx = 0 + + def asbytes(self): + return self.content + + def __str__(self): + return self.asbytes() + + def __repr__(self): + return "BER('" + repr(self.content) + "')" + + def decode(self): + return self.decode_next() + + def decode_next(self): + if self.idx >= len(self.content): + return None + ident = byte_ord(self.content[self.idx]) + self.idx += 1 + if (ident & 31) == 31: + # identifier > 30 + ident = 0 + while self.idx < len(self.content): + t = byte_ord(self.content[self.idx]) + self.idx += 1 + ident = (ident << 7) | (t & 0x7F) + if not (t & 0x80): + break + if self.idx >= len(self.content): + return None + # now fetch length + size = byte_ord(self.content[self.idx]) + self.idx += 1 + if size & 0x80: + # more complimicated... + # FIXME: theoretically should handle indefinite-length (0x80) + t = size & 0x7F + if self.idx + t > len(self.content): + return None + size = util.inflate_long( + self.content[self.idx : self.idx + t], True + ) + self.idx += t + if self.idx + size > len(self.content): + # can't fit + return None + data = self.content[self.idx : self.idx + size] + self.idx += size + # now switch on id + if ident == 0x30: + # sequence + return self.decode_sequence(data) + elif ident == 2: + # int + return util.inflate_long(data) + else: + # 1: boolean (00 false, otherwise true) + msg = "Unknown ber encoding type {:d} (robey is lazy)" + raise BERException(msg.format(ident)) + + @staticmethod + def decode_sequence(data): + out = [] + ber = BER(data) + while True: + x = ber.decode_next() + if x is None: + break + out.append(x) + return out + + def encode_tlv(self, ident, val): + # no need to support ident > 31 here + self.content += byte_chr(ident) + if len(val) > 0x7F: + lenstr = util.deflate_long(len(val)) + self.content += byte_chr(0x80 + len(lenstr)) + lenstr + else: + self.content += byte_chr(len(val)) + self.content += val + + def encode(self, x): + if type(x) is bool: + if x: + self.encode_tlv(1, max_byte) + else: + self.encode_tlv(1, zero_byte) + elif (type(x) is int) or (type(x) is int64): + self.encode_tlv(2, util.deflate_long(x)) + elif type(x) is str: + self.encode_tlv(4, x) + elif (type(x) is list) or (type(x) is tuple): + self.encode_tlv(0x30, self.encode_sequence(x)) + else: + raise BERException( + "Unknown type for encoding: {!r}".format(type(x)) + ) + + @staticmethod + def encode_sequence(data): + ber = BER() + for item in data: + ber.encode(item) + return ber.asbytes() diff --git a/.venv/lib/python3.9/site-packages/paramiko/buffered_pipe.py b/.venv/lib/python3.9/site-packages/paramiko/buffered_pipe.py new file mode 100644 index 0000000..c19279c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/buffered_pipe.py @@ -0,0 +1,212 @@ +# Copyright (C) 2006-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Attempt to generalize the "feeder" part of a `.Channel`: an object which can be +read from and closed, but is reading from a buffer fed by another thread. The +read operations are blocking and can have a timeout set. +""" + +import array +import threading +import time +from paramiko.util import b + + +class PipeTimeout(IOError): + """ + Indicates that a timeout was reached on a read from a `.BufferedPipe`. + """ + + pass + + +class BufferedPipe: + """ + A buffer that obeys normal read (with timeout) & close semantics for a + file or socket, but is fed data from another thread. This is used by + `.Channel`. + """ + + def __init__(self): + self._lock = threading.Lock() + self._cv = threading.Condition(self._lock) + self._event = None + self._buffer = array.array("B") + self._closed = False + + def _buffer_frombytes(self, data): + self._buffer.frombytes(data) + + def _buffer_tobytes(self, limit=None): + return self._buffer[:limit].tobytes() + + def set_event(self, event): + """ + Set an event on this buffer. When data is ready to be read (or the + buffer has been closed), the event will be set. When no data is + ready, the event will be cleared. + + :param threading.Event event: the event to set/clear + """ + self._lock.acquire() + try: + self._event = event + # Make sure the event starts in `set` state if we appear to already + # be closed; otherwise, if we start in `clear` state & are closed, + # nothing will ever call `.feed` and the event (& OS pipe, if we're + # wrapping one - see `Channel.fileno`) will permanently stay in + # `clear`, causing deadlock if e.g. `select`ed upon. + if self._closed or len(self._buffer) > 0: + event.set() + else: + event.clear() + finally: + self._lock.release() + + def feed(self, data): + """ + Feed new data into this pipe. This method is assumed to be called + from a separate thread, so synchronization is done. + + :param data: the data to add, as a ``str`` or ``bytes`` + """ + self._lock.acquire() + try: + if self._event is not None: + self._event.set() + self._buffer_frombytes(b(data)) + self._cv.notify_all() + finally: + self._lock.release() + + def read_ready(self): + """ + Returns true if data is buffered and ready to be read from this + feeder. A ``False`` result does not mean that the feeder has closed; + it means you may need to wait before more data arrives. + + :return: + ``True`` if a `read` call would immediately return at least one + byte; ``False`` otherwise. + """ + self._lock.acquire() + try: + if len(self._buffer) == 0: + return False + return True + finally: + self._lock.release() + + def read(self, nbytes, timeout=None): + """ + Read data from the pipe. The return value is a string representing + the data received. The maximum amount of data to be received at once + is specified by ``nbytes``. If a string of length zero is returned, + the pipe has been closed. + + The optional ``timeout`` argument can be a nonnegative float expressing + seconds, or ``None`` for no timeout. If a float is given, a + `.PipeTimeout` will be raised if the timeout period value has elapsed + before any data arrives. 
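
        Editor's illustration (not upstream documentation) of typical
        cross-thread usage::

            from paramiko.buffered_pipe import BufferedPipe, PipeTimeout

            pipe = BufferedPipe()
            pipe.feed(b"hello")                 # normally another thread
            assert pipe.read(5, timeout=1.0) == b"hello"
            try:
                pipe.read(1, timeout=0.1)       # empty buffer -> PipeTimeout
            except PipeTimeout:
                pass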
+ + :param int nbytes: maximum number of bytes to read + :param float timeout: + maximum seconds to wait (or ``None``, the default, to wait forever) + :return: the read data, as a ``str`` or ``bytes`` + + :raises: + `.PipeTimeout` -- if a timeout was specified and no data was ready + before that timeout + """ + out = bytes() + self._lock.acquire() + try: + if len(self._buffer) == 0: + if self._closed: + return out + # should we block? + if timeout == 0.0: + raise PipeTimeout() + # loop here in case we get woken up but a different thread has + # grabbed everything in the buffer. + while (len(self._buffer) == 0) and not self._closed: + then = time.time() + self._cv.wait(timeout) + if timeout is not None: + timeout -= time.time() - then + if timeout <= 0.0: + raise PipeTimeout() + + # something's in the buffer and we have the lock! + if len(self._buffer) <= nbytes: + out = self._buffer_tobytes() + del self._buffer[:] + if (self._event is not None) and not self._closed: + self._event.clear() + else: + out = self._buffer_tobytes(nbytes) + del self._buffer[:nbytes] + finally: + self._lock.release() + + return out + + def empty(self): + """ + Clear out the buffer and return all data that was in it. + + :return: + any data that was in the buffer prior to clearing it out, as a + `str` + """ + self._lock.acquire() + try: + out = self._buffer_tobytes() + del self._buffer[:] + if (self._event is not None) and not self._closed: + self._event.clear() + return out + finally: + self._lock.release() + + def close(self): + """ + Close this pipe object. Future calls to `read` after the buffer + has been emptied will return immediately with an empty string. + """ + self._lock.acquire() + try: + self._closed = True + self._cv.notify_all() + if self._event is not None: + self._event.set() + finally: + self._lock.release() + + def __len__(self): + """ + Return the number of bytes buffered. + + :return: number (`int`) of bytes buffered + """ + self._lock.acquire() + try: + return len(self._buffer) + finally: + self._lock.release() diff --git a/.venv/lib/python3.9/site-packages/paramiko/channel.py b/.venv/lib/python3.9/site-packages/paramiko/channel.py new file mode 100644 index 0000000..25326ca --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/channel.py @@ -0,0 +1,1390 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Abstraction for an SSH2 channel. 
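
Editor's illustration (not upstream documentation) of the typical client-side
flow; ``host``, ``user`` and ``password`` are placeholders::

    import paramiko

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(host, username=user, password=password)

    chan = client.get_transport().open_session()
    chan.exec_command("uname -a")

    # Drain stdout before asking for the exit status; recv_exit_status can
    # otherwise block if the command's output overflows the SSH window.
    output = b""
    while True:
        data = chan.recv(4096)
        if not data:
            break
        output += data
    status = chan.recv_exit_status()
    chan.close()
    client.close()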
+""" + +import binascii +import os +import socket +import time +import threading + +from functools import wraps + +from paramiko import util +from paramiko.common import ( + cMSG_CHANNEL_REQUEST, + cMSG_CHANNEL_WINDOW_ADJUST, + cMSG_CHANNEL_DATA, + cMSG_CHANNEL_EXTENDED_DATA, + DEBUG, + ERROR, + cMSG_CHANNEL_SUCCESS, + cMSG_CHANNEL_FAILURE, + cMSG_CHANNEL_EOF, + cMSG_CHANNEL_CLOSE, +) +from paramiko.message import Message +from paramiko.ssh_exception import SSHException +from paramiko.file import BufferedFile +from paramiko.buffered_pipe import BufferedPipe, PipeTimeout +from paramiko import pipe +from paramiko.util import ClosingContextManager + + +def open_only(func): + """ + Decorator for `.Channel` methods which performs an openness check. + + :raises: + `.SSHException` -- If the wrapped method is called on an unopened + `.Channel`. + """ + + @wraps(func) + def _check(self, *args, **kwds): + if ( + self.closed + or self.eof_received + or self.eof_sent + or not self.active + ): + raise SSHException("Channel is not open") + return func(self, *args, **kwds) + + return _check + + +class Channel(ClosingContextManager): + """ + A secure tunnel across an SSH `.Transport`. A Channel is meant to behave + like a socket, and has an API that should be indistinguishable from the + Python socket API. + + Because SSH2 has a windowing kind of flow control, if you stop reading data + from a Channel and its buffer fills up, the server will be unable to send + you any more data until you read some of it. (This won't affect other + channels on the same transport -- all channels on a single transport are + flow-controlled independently.) Similarly, if the server isn't reading + data you send, calls to `send` may block, unless you set a timeout. This + is exactly like a normal network socket, so it shouldn't be too surprising. + + Instances of this class may be used as context managers. + """ + + def __init__(self, chanid): + """ + Create a new channel. The channel is not associated with any + particular session or `.Transport` until the Transport attaches it. + Normally you would only call this method from the constructor of a + subclass of `.Channel`. + + :param int chanid: + the ID of this channel, as passed by an existing `.Transport`. + """ + #: Channel ID + self.chanid = chanid + #: Remote channel ID + self.remote_chanid = 0 + #: `.Transport` managing this channel + self.transport = None + #: Whether the connection is presently active + self.active = False + self.eof_received = 0 + self.eof_sent = 0 + self.in_buffer = BufferedPipe() + self.in_stderr_buffer = BufferedPipe() + self.timeout = None + #: Whether the connection has been closed + self.closed = False + self.ultra_debug = False + self.lock = threading.Lock() + self.out_buffer_cv = threading.Condition(self.lock) + self.in_window_size = 0 + self.out_window_size = 0 + self.in_max_packet_size = 0 + self.out_max_packet_size = 0 + self.in_window_threshold = 0 + self.in_window_sofar = 0 + self.status_event = threading.Event() + self._name = str(chanid) + self.logger = util.get_logger("paramiko.transport") + self._pipe = None + self.event = threading.Event() + self.event_ready = False + self.combine_stderr = False + self.exit_status = -1 + self.origin_addr = None + + def __del__(self): + try: + self.close() + except: + pass + + def __repr__(self): + """ + Return a string representation of this object, for debugging. 
+ """ + out = " 0: + out += " in-buffer={}".format(len(self.in_buffer)) + out += " -> " + repr(self.transport) + out += ">" + return out + + @open_only + def get_pty( + self, + term="vt100", + width=80, + height=24, + width_pixels=0, + height_pixels=0, + ): + """ + Request a pseudo-terminal from the server. This is usually used right + after creating a client channel, to ask the server to provide some + basic terminal semantics for a shell invoked with `invoke_shell`. + It isn't necessary (or desirable) to call this method if you're going + to execute a single command with `exec_command`. + + :param str term: the terminal type to emulate + (for example, ``'vt100'``) + :param int width: width (in characters) of the terminal screen + :param int height: height (in characters) of the terminal screen + :param int width_pixels: width (in pixels) of the terminal screen + :param int height_pixels: height (in pixels) of the terminal screen + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("pty-req") + m.add_boolean(True) + m.add_string(term) + m.add_int(width) + m.add_int(height) + m.add_int(width_pixels) + m.add_int(height_pixels) + m.add_string(bytes()) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + + @open_only + def invoke_shell(self): + """ + Request an interactive shell session on this channel. If the server + allows it, the channel will then be directly connected to the stdin, + stdout, and stderr of the shell. + + Normally you would call `get_pty` before this, in which case the + shell will operate through the pty, and the channel will be connected + to the stdin and stdout of the pty. + + When the shell exits, the channel will be closed and can't be reused. + You must open a new channel if you wish to open another shell. + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("shell") + m.add_boolean(True) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + + @open_only + def exec_command(self, command): + """ + Execute a command on the server. If the server allows it, the channel + will then be directly connected to the stdin, stdout, and stderr of + the command being executed. + + When the command finishes executing, the channel will be closed and + can't be reused. You must open a new channel if you wish to execute + another command. + + :param str command: a shell command to execute. + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("exec") + m.add_boolean(True) + m.add_string(command) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + + @open_only + def invoke_subsystem(self, subsystem): + """ + Request a subsystem on the server (for example, ``sftp``). If the + server allows it, the channel will then be directly connected to the + requested subsystem. + + When the subsystem finishes, the channel will be closed and can't be + reused. + + :param str subsystem: name of the subsystem being requested. 
+ + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("subsystem") + m.add_boolean(True) + m.add_string(subsystem) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + + @open_only + def resize_pty(self, width=80, height=24, width_pixels=0, height_pixels=0): + """ + Resize the pseudo-terminal. This can be used to change the width and + height of the terminal emulation created in a previous `get_pty` call. + + :param int width: new width (in characters) of the terminal screen + :param int height: new height (in characters) of the terminal screen + :param int width_pixels: new width (in pixels) of the terminal screen + :param int height_pixels: new height (in pixels) of the terminal screen + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("window-change") + m.add_boolean(False) + m.add_int(width) + m.add_int(height) + m.add_int(width_pixels) + m.add_int(height_pixels) + self.transport._send_user_message(m) + + @open_only + def update_environment(self, environment): + """ + Updates this channel's remote shell environment. + + .. note:: + This operation is additive - i.e. the current environment is not + reset before the given environment variables are set. + + .. warning:: + Servers may silently reject some environment variables; see the + warning in `set_environment_variable` for details. + + :param dict environment: + a dictionary containing the name and respective values to set + :raises: + `.SSHException` -- if any of the environment variables was rejected + by the server or the channel was closed + """ + for name, value in environment.items(): + try: + self.set_environment_variable(name, value) + except SSHException as e: + err = 'Failed to set environment variable "{}".' + raise SSHException(err.format(name), e) + + @open_only + def set_environment_variable(self, name, value): + """ + Set the value of an environment variable. + + .. warning:: + The server may reject this request depending on its ``AcceptEnv`` + setting; such rejections will fail silently (which is common client + practice for this particular request type). Make sure you + understand your server's configuration before using! + + :param str name: name of the environment variable + :param str value: value of the environment variable + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("env") + m.add_boolean(False) + m.add_string(name) + m.add_string(value) + self.transport._send_user_message(m) + + def exit_status_ready(self): + """ + Return true if the remote process has exited and returned an exit + status. You may use this to poll the process status if you don't + want to block in `recv_exit_status`. Note that the server may not + return an exit status in some cases (like bad servers). + + :return: + ``True`` if `recv_exit_status` will return immediately, else + ``False``. + + .. versionadded:: 1.7.3 + """ + return self.closed or self.status_event.is_set() + + def recv_exit_status(self): + """ + Return the exit status from the process on the server. This is + mostly useful for retrieving the results of an `exec_command`. 
+ If the command hasn't finished yet, this method will wait until + it does, or until the channel is closed. If no exit status is + provided by the server, -1 is returned. + + .. warning:: + In some situations, receiving remote output larger than the current + `.Transport` or session's ``window_size`` (e.g. that set by the + ``default_window_size`` kwarg for `.Transport.__init__`) will cause + `.recv_exit_status` to hang indefinitely if it is called prior to a + sufficiently large `.Channel.recv` (or if there are no threads + calling `.Channel.recv` in the background). + + In these cases, ensuring that `.recv_exit_status` is called *after* + `.Channel.recv` (or, again, using threads) can avoid the hang. + + :return: the exit code (as an `int`) of the process on the server. + + .. versionadded:: 1.2 + """ + self.status_event.wait() + assert self.status_event.is_set() + return self.exit_status + + def send_exit_status(self, status): + """ + Send the exit status of an executed command to the client. (This + really only makes sense in server mode.) Many clients expect to + get some sort of status code back from an executed command after + it completes. + + :param int status: the exit code of the process + + .. versionadded:: 1.2 + """ + # in many cases, the channel will not still be open here. + # that's fine. + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("exit-status") + m.add_boolean(False) + m.add_int(status) + self.transport._send_user_message(m) + + @open_only + def request_x11( + self, + screen_number=0, + auth_protocol=None, + auth_cookie=None, + single_connection=False, + handler=None, + ): + """ + Request an x11 session on this channel. If the server allows it, + further x11 requests can be made from the server to the client, + when an x11 application is run in a shell session. + + From :rfc:`4254`:: + + It is RECOMMENDED that the 'x11 authentication cookie' that is + sent be a fake, random cookie, and that the cookie be checked and + replaced by the real cookie when a connection request is received. + + If you omit the auth_cookie, a new secure random 128-bit value will be + generated, used, and returned. You will need to use this value to + verify incoming x11 requests and replace them with the actual local + x11 cookie (which requires some knowledge of the x11 protocol). + + If a handler is passed in, the handler is called from another thread + whenever a new x11 connection arrives. The default handler queues up + incoming x11 connections, which may be retrieved using + `.Transport.accept`. The handler's calling signature is:: + + handler(channel: Channel, (address: str, port: int)) + + :param int screen_number: the x11 screen number (0, 10, etc.) 
+ :param str auth_protocol: + the name of the X11 authentication method used; if none is given, + ``"MIT-MAGIC-COOKIE-1"`` is used + :param str auth_cookie: + hexadecimal string containing the x11 auth cookie; if none is + given, a secure random 128-bit value is generated + :param bool single_connection: + if True, only a single x11 connection will be forwarded (by + default, any number of x11 connections can arrive over this + session) + :param handler: + an optional callable handler to use for incoming X11 connections + :return: the auth_cookie used + """ + if auth_protocol is None: + auth_protocol = "MIT-MAGIC-COOKIE-1" + if auth_cookie is None: + auth_cookie = binascii.hexlify(os.urandom(16)) + + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("x11-req") + m.add_boolean(True) + m.add_boolean(single_connection) + m.add_string(auth_protocol) + m.add_string(auth_cookie) + m.add_int(screen_number) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + self.transport._set_x11_handler(handler) + return auth_cookie + + @open_only + def request_forward_agent(self, handler): + """ + Request for a forward SSH Agent on this channel. + This is only valid for an ssh-agent from OpenSSH !!! + + :param handler: + a required callable handler to use for incoming SSH Agent + connections + + :return: True if we are ok, else False + (at that time we always return ok) + + :raises: SSHException in case of channel problem. + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("auth-agent-req@openssh.com") + m.add_boolean(False) + self.transport._send_user_message(m) + self.transport._set_forward_agent_handler(handler) + return True + + def get_transport(self): + """ + Return the `.Transport` associated with this channel. + """ + return self.transport + + def set_name(self, name): + """ + Set a name for this channel. Currently it's only used to set the name + of the channel in logfile entries. The name can be fetched with the + `get_name` method. + + :param str name: new channel name + """ + self._name = name + + def get_name(self): + """ + Get the name of this channel that was previously set by `set_name`. + """ + return self._name + + def get_id(self): + """ + Return the `int` ID # for this channel. + + The channel ID is unique across a `.Transport` and usually a small + number. It's also the number passed to + `.ServerInterface.check_channel_request` when determining whether to + accept a channel request in server mode. + """ + return self.chanid + + def set_combine_stderr(self, combine): + """ + Set whether stderr should be combined into stdout on this channel. + The default is ``False``, but in some cases it may be convenient to + have both streams combined. + + If this is ``False``, and `exec_command` is called (or ``invoke_shell`` + with no pty), output to stderr will not show up through the `recv` + and `recv_ready` calls. You will have to use `recv_stderr` and + `recv_stderr_ready` to get stderr output. + + If this is ``True``, data will never show up via `recv_stderr` or + `recv_stderr_ready`. + + :param bool combine: + ``True`` if stderr output should be combined into stdout on this + channel. + :return: the previous setting (a `bool`). + + .. 
versionadded:: 1.1 + """ + data = bytes() + self.lock.acquire() + try: + old = self.combine_stderr + self.combine_stderr = combine + if combine and not old: + # copy old stderr buffer into primary buffer + data = self.in_stderr_buffer.empty() + finally: + self.lock.release() + if len(data) > 0: + self._feed(data) + return old + + # ...socket API... + + def settimeout(self, timeout): + """ + Set a timeout on blocking read/write operations. The ``timeout`` + argument can be a nonnegative float expressing seconds, or ``None``. + If a float is given, subsequent channel read/write operations will + raise a timeout exception if the timeout period value has elapsed + before the operation has completed. Setting a timeout of ``None`` + disables timeouts on socket operations. + + ``chan.settimeout(0.0)`` is equivalent to ``chan.setblocking(0)``; + ``chan.settimeout(None)`` is equivalent to ``chan.setblocking(1)``. + + :param float timeout: + seconds to wait for a pending read/write operation before raising + ``socket.timeout``, or ``None`` for no timeout. + """ + self.timeout = timeout + + def gettimeout(self): + """ + Returns the timeout in seconds (as a float) associated with socket + operations, or ``None`` if no timeout is set. This reflects the last + call to `setblocking` or `settimeout`. + """ + return self.timeout + + def setblocking(self, blocking): + """ + Set blocking or non-blocking mode of the channel: if ``blocking`` is 0, + the channel is set to non-blocking mode; otherwise it's set to blocking + mode. Initially all channels are in blocking mode. + + In non-blocking mode, if a `recv` call doesn't find any data, or if a + `send` call can't immediately dispose of the data, an error exception + is raised. In blocking mode, the calls block until they can proceed. An + EOF condition is considered "immediate data" for `recv`, so if the + channel is closed in the read direction, it will never block. + + ``chan.setblocking(0)`` is equivalent to ``chan.settimeout(0)``; + ``chan.setblocking(1)`` is equivalent to ``chan.settimeout(None)``. + + :param int blocking: + 0 to set non-blocking mode; non-0 to set blocking mode. + """ + if blocking: + self.settimeout(None) + else: + self.settimeout(0.0) + + def getpeername(self): + """ + Return the address of the remote side of this Channel, if possible. + + This simply wraps `.Transport.getpeername`, used to provide enough of a + socket-like interface to allow asyncore to work. (asyncore likes to + call ``'getpeername'``.) + """ + return self.transport.getpeername() + + def close(self): + """ + Close the channel. All future read/write operations on the channel + will fail. The remote end will receive no more data (after queued data + is flushed). Channels are automatically closed when their `.Transport` + is closed or when they are garbage collected. + """ + self.lock.acquire() + try: + # only close the pipe when the user explicitly closes the channel. + # otherwise they will get unpleasant surprises. (and do it before + # checking self.closed, since the remote host may have already + # closed the connection.) + if self._pipe is not None: + self._pipe.close() + self._pipe = None + + if not self.active or self.closed: + return + msgs = self._close_internal() + finally: + self.lock.release() + for m in msgs: + if m is not None: + self.transport._send_user_message(m) + + def recv_ready(self): + """ + Returns true if data is buffered and ready to be read from this + channel. 
A ``False`` result does not mean that the channel has closed; + it means you may need to wait before more data arrives. + + :return: + ``True`` if a `recv` call on this channel would immediately return + at least one byte; ``False`` otherwise. + """ + return self.in_buffer.read_ready() + + def recv(self, nbytes): + """ + Receive data from the channel. The return value is a string + representing the data received. The maximum amount of data to be + received at once is specified by ``nbytes``. If a string of + length zero is returned, the channel stream has closed. + + :param int nbytes: maximum number of bytes to read. + :return: received data, as a `bytes`. + + :raises socket.timeout: + if no data is ready before the timeout set by `settimeout`. + """ + try: + out = self.in_buffer.read(nbytes, self.timeout) + except PipeTimeout: + raise socket.timeout() + + ack = self._check_add_window(len(out)) + # no need to hold the channel lock when sending this + if ack > 0: + m = Message() + m.add_byte(cMSG_CHANNEL_WINDOW_ADJUST) + m.add_int(self.remote_chanid) + m.add_int(ack) + self.transport._send_user_message(m) + + return out + + def recv_stderr_ready(self): + """ + Returns true if data is buffered and ready to be read from this + channel's stderr stream. Only channels using `exec_command` or + `invoke_shell` without a pty will ever have data on the stderr + stream. + + :return: + ``True`` if a `recv_stderr` call on this channel would immediately + return at least one byte; ``False`` otherwise. + + .. versionadded:: 1.1 + """ + return self.in_stderr_buffer.read_ready() + + def recv_stderr(self, nbytes): + """ + Receive data from the channel's stderr stream. Only channels using + `exec_command` or `invoke_shell` without a pty will ever have data + on the stderr stream. The return value is a string representing the + data received. The maximum amount of data to be received at once is + specified by ``nbytes``. If a string of length zero is returned, the + channel stream has closed. + + :param int nbytes: maximum number of bytes to read. + :return: received data as a `bytes` + + :raises socket.timeout: if no data is ready before the timeout set by + `settimeout`. + + .. versionadded:: 1.1 + """ + try: + out = self.in_stderr_buffer.read(nbytes, self.timeout) + except PipeTimeout: + raise socket.timeout() + + ack = self._check_add_window(len(out)) + # no need to hold the channel lock when sending this + if ack > 0: + m = Message() + m.add_byte(cMSG_CHANNEL_WINDOW_ADJUST) + m.add_int(self.remote_chanid) + m.add_int(ack) + self.transport._send_user_message(m) + + return out + + def send_ready(self): + """ + Returns true if data can be written to this channel without blocking. + This means the channel is either closed (so any write attempt would + return immediately) or there is at least one byte of space in the + outbound buffer. If there is at least one byte of space in the + outbound buffer, a `send` call will succeed immediately and return + the number of bytes actually written. + + :return: + ``True`` if a `send` call on this channel would immediately succeed + or fail + """ + self.lock.acquire() + try: + if self.closed or self.eof_sent: + return True + return self.out_window_size > 0 + finally: + self.lock.release() + + def send(self, s): + """ + Send data to the channel. Returns the number of bytes sent, or 0 if + the channel stream is closed. 
Applications are responsible for + checking that all data has been sent: if only some of the data was + transmitted, the application needs to attempt delivery of the remaining + data. + + :param bytes s: data to send + :return: number of bytes actually sent, as an `int` + + :raises socket.timeout: if no data could be sent before the timeout set + by `settimeout`. + """ + + m = Message() + m.add_byte(cMSG_CHANNEL_DATA) + m.add_int(self.remote_chanid) + return self._send(s, m) + + def send_stderr(self, s): + """ + Send data to the channel on the "stderr" stream. This is normally + only used by servers to send output from shell commands -- clients + won't use this. Returns the number of bytes sent, or 0 if the channel + stream is closed. Applications are responsible for checking that all + data has been sent: if only some of the data was transmitted, the + application needs to attempt delivery of the remaining data. + + :param bytes s: data to send. + :return: number of bytes actually sent, as an `int`. + + :raises socket.timeout: + if no data could be sent before the timeout set by `settimeout`. + + .. versionadded:: 1.1 + """ + + m = Message() + m.add_byte(cMSG_CHANNEL_EXTENDED_DATA) + m.add_int(self.remote_chanid) + m.add_int(1) + return self._send(s, m) + + def sendall(self, s): + """ + Send data to the channel, without allowing partial results. Unlike + `send`, this method continues to send data from the given string until + either all data has been sent or an error occurs. Nothing is returned. + + :param bytes s: data to send. + + :raises socket.timeout: + if sending stalled for longer than the timeout set by `settimeout`. + :raises socket.error: + if an error occurred before the entire string was sent. + + .. note:: + If the channel is closed while only part of the data has been + sent, there is no way to determine how much data (if any) was sent. + This is irritating, but identically follows Python's API. + """ + while s: + sent = self.send(s) + s = s[sent:] + return None + + def sendall_stderr(self, s): + """ + Send data to the channel's "stderr" stream, without allowing partial + results. Unlike `send_stderr`, this method continues to send data + from the given bytestring until all data has been sent or an error + occurs. Nothing is returned. + + :param bytes s: data to send to the client as "stderr" output. + + :raises socket.timeout: + if sending stalled for longer than the timeout set by `settimeout`. + :raises socket.error: + if an error occurred before the entire string was sent. + + .. versionadded:: 1.1 + """ + while s: + sent = self.send_stderr(s) + s = s[sent:] + return None + + def makefile(self, *params): + """ + Return a file-like object associated with this channel. The optional + ``mode`` and ``bufsize`` arguments are interpreted the same way as by + the built-in ``file()`` function in Python. + + :return: `.ChannelFile` object which can be used for Python file I/O. + """ + return ChannelFile(*([self] + list(params))) + + def makefile_stderr(self, *params): + """ + Return a file-like object associated with this channel's stderr + stream. Only channels using `exec_command` or `invoke_shell` + without a pty will ever have data on the stderr stream. + + The optional ``mode`` and ``bufsize`` arguments are interpreted the + same way as by the built-in ``file()`` function in Python. For a + client, it only makes sense to open this file for reading. For a + server, it only makes sense to open this file for writing. 
+ + :returns: + `.ChannelStderrFile` object which can be used for Python file I/O. + + .. versionadded:: 1.1 + """ + return ChannelStderrFile(*([self] + list(params))) + + def makefile_stdin(self, *params): + """ + Return a file-like object associated with this channel's stdin + stream. + + The optional ``mode`` and ``bufsize`` arguments are interpreted the + same way as by the built-in ``file()`` function in Python. For a + client, it only makes sense to open this file for writing. For a + server, it only makes sense to open this file for reading. + + :returns: + `.ChannelStdinFile` object which can be used for Python file I/O. + + .. versionadded:: 2.6 + """ + return ChannelStdinFile(*([self] + list(params))) + + def fileno(self): + """ + Returns an OS-level file descriptor which can be used for polling, but + but not for reading or writing. This is primarily to allow Python's + ``select`` module to work. + + The first time ``fileno`` is called on a channel, a pipe is created to + simulate real OS-level file descriptor (FD) behavior. Because of this, + two OS-level FDs are created, which will use up FDs faster than normal. + (You won't notice this effect unless you have hundreds of channels + open at the same time.) + + :return: an OS-level file descriptor (`int`) + + .. warning:: + This method causes channel reads to be slightly less efficient. + """ + self.lock.acquire() + try: + if self._pipe is not None: + return self._pipe.fileno() + # create the pipe and feed in any existing data + self._pipe = pipe.make_pipe() + p1, p2 = pipe.make_or_pipe(self._pipe) + self.in_buffer.set_event(p1) + self.in_stderr_buffer.set_event(p2) + return self._pipe.fileno() + finally: + self.lock.release() + + def shutdown(self, how): + """ + Shut down one or both halves of the connection. If ``how`` is 0, + further receives are disallowed. If ``how`` is 1, further sends + are disallowed. If ``how`` is 2, further sends and receives are + disallowed. This closes the stream in one or both directions. + + :param int how: + 0 (stop receiving), 1 (stop sending), or 2 (stop receiving and + sending). + """ + if (how == 0) or (how == 2): + # feign "read" shutdown + self.eof_received = 1 + if (how == 1) or (how == 2): + self.lock.acquire() + try: + m = self._send_eof() + finally: + self.lock.release() + if m is not None and self.transport is not None: + self.transport._send_user_message(m) + + def shutdown_read(self): + """ + Shutdown the receiving side of this socket, closing the stream in + the incoming direction. After this call, future reads on this + channel will fail instantly. This is a convenience method, equivalent + to ``shutdown(0)``, for people who don't make it a habit to + memorize unix constants from the 1970s. + + .. versionadded:: 1.2 + """ + self.shutdown(0) + + def shutdown_write(self): + """ + Shutdown the sending side of this socket, closing the stream in + the outgoing direction. After this call, future writes on this + channel will fail instantly. This is a convenience method, equivalent + to ``shutdown(1)``, for people who don't make it a habit to + memorize unix constants from the 1970s. + + .. versionadded:: 1.2 + """ + self.shutdown(1) + + @property + def _closed(self): + # Concession to Python 3's socket API, which has a private ._closed + # attribute instead of a semipublic .closed attribute. 
+ return self.closed + + # ...calls from Transport + + def _set_transport(self, transport): + self.transport = transport + self.logger = util.get_logger(self.transport.get_log_channel()) + + def _set_window(self, window_size, max_packet_size): + self.in_window_size = window_size + self.in_max_packet_size = max_packet_size + # threshold of bytes we receive before we bother to send + # a window update + self.in_window_threshold = window_size // 10 + self.in_window_sofar = 0 + self._log(DEBUG, "Max packet in: {} bytes".format(max_packet_size)) + + def _set_remote_channel(self, chanid, window_size, max_packet_size): + self.remote_chanid = chanid + self.out_window_size = window_size + self.out_max_packet_size = self.transport._sanitize_packet_size( + max_packet_size + ) + self.active = 1 + self._log( + DEBUG, "Max packet out: {} bytes".format(self.out_max_packet_size) + ) + + def _request_success(self, m): + self._log(DEBUG, "Sesch channel {} request ok".format(self.chanid)) + self.event_ready = True + self.event.set() + return + + def _request_failed(self, m): + self.lock.acquire() + try: + msgs = self._close_internal() + finally: + self.lock.release() + for m in msgs: + if m is not None: + self.transport._send_user_message(m) + + def _feed(self, m): + if isinstance(m, bytes): + # passed from _feed_extended + s = m + else: + s = m.get_binary() + self.in_buffer.feed(s) + + def _feed_extended(self, m): + code = m.get_int() + s = m.get_binary() + if code != 1: + self._log( + ERROR, "unknown extended_data type {}; discarding".format(code) + ) + return + if self.combine_stderr: + self._feed(s) + else: + self.in_stderr_buffer.feed(s) + + def _window_adjust(self, m): + nbytes = m.get_int() + self.lock.acquire() + try: + if self.ultra_debug: + self._log(DEBUG, "window up {}".format(nbytes)) + self.out_window_size += nbytes + self.out_buffer_cv.notify_all() + finally: + self.lock.release() + + def _handle_request(self, m): + key = m.get_text() + want_reply = m.get_boolean() + server = self.transport.server_object + ok = False + if key == "exit-status": + self.exit_status = m.get_int() + self.status_event.set() + ok = True + elif key == "xon-xoff": + # ignore + ok = True + elif key == "pty-req": + term = m.get_string() + width = m.get_int() + height = m.get_int() + pixelwidth = m.get_int() + pixelheight = m.get_int() + modes = m.get_string() + if server is None: + ok = False + else: + ok = server.check_channel_pty_request( + self, term, width, height, pixelwidth, pixelheight, modes + ) + elif key == "shell": + if server is None: + ok = False + else: + ok = server.check_channel_shell_request(self) + elif key == "env": + name = m.get_string() + value = m.get_string() + if server is None: + ok = False + else: + ok = server.check_channel_env_request(self, name, value) + elif key == "exec": + cmd = m.get_string() + if server is None: + ok = False + else: + ok = server.check_channel_exec_request(self, cmd) + elif key == "subsystem": + name = m.get_text() + if server is None: + ok = False + else: + ok = server.check_channel_subsystem_request(self, name) + elif key == "window-change": + width = m.get_int() + height = m.get_int() + pixelwidth = m.get_int() + pixelheight = m.get_int() + if server is None: + ok = False + else: + ok = server.check_channel_window_change_request( + self, width, height, pixelwidth, pixelheight + ) + elif key == "x11-req": + single_connection = m.get_boolean() + auth_proto = m.get_text() + auth_cookie = m.get_binary() + screen_number = m.get_int() + if server is None: + ok = False + 
else: + ok = server.check_channel_x11_request( + self, + single_connection, + auth_proto, + auth_cookie, + screen_number, + ) + elif key == "auth-agent-req@openssh.com": + if server is None: + ok = False + else: + ok = server.check_channel_forward_agent_request(self) + else: + self._log(DEBUG, 'Unhandled channel request "{}"'.format(key)) + ok = False + if want_reply: + m = Message() + if ok: + m.add_byte(cMSG_CHANNEL_SUCCESS) + else: + m.add_byte(cMSG_CHANNEL_FAILURE) + m.add_int(self.remote_chanid) + self.transport._send_user_message(m) + + def _handle_eof(self, m): + self.lock.acquire() + try: + if not self.eof_received: + self.eof_received = True + self.in_buffer.close() + self.in_stderr_buffer.close() + if self._pipe is not None: + self._pipe.set_forever() + finally: + self.lock.release() + self._log(DEBUG, "EOF received ({})".format(self._name)) + + def _handle_close(self, m): + self.lock.acquire() + try: + msgs = self._close_internal() + self.transport._unlink_channel(self.chanid) + finally: + self.lock.release() + for m in msgs: + if m is not None: + self.transport._send_user_message(m) + + # ...internals... + + def _send(self, s, m): + size = len(s) + self.lock.acquire() + try: + if self.closed: + # this doesn't seem useful, but it is the documented behavior + # of Socket + raise socket.error("Socket is closed") + size = self._wait_for_send_window(size) + if size == 0: + # eof or similar + return 0 + m.add_string(s[:size]) + finally: + self.lock.release() + # Note: We release self.lock before calling _send_user_message. + # Otherwise, we can deadlock during re-keying. + self.transport._send_user_message(m) + return size + + def _log(self, level, msg, *args): + self.logger.log(level, "[chan " + self._name + "] " + msg, *args) + + def _event_pending(self): + self.event.clear() + self.event_ready = False + + def _wait_for_event(self): + self.event.wait() + assert self.event.is_set() + if self.event_ready: + return + e = self.transport.get_exception() + if e is None: + e = SSHException("Channel closed.") + raise e + + def _set_closed(self): + # you are holding the lock. + self.closed = True + self.in_buffer.close() + self.in_stderr_buffer.close() + self.out_buffer_cv.notify_all() + # Notify any waiters that we are closed + self.event.set() + self.status_event.set() + if self._pipe is not None: + self._pipe.set_forever() + + def _send_eof(self): + # you are holding the lock. + if self.eof_sent: + return None + m = Message() + m.add_byte(cMSG_CHANNEL_EOF) + m.add_int(self.remote_chanid) + self.eof_sent = True + self._log(DEBUG, "EOF sent ({})".format(self._name)) + return m + + def _close_internal(self): + # you are holding the lock. + if not self.active or self.closed: + return None, None + m1 = self._send_eof() + m2 = Message() + m2.add_byte(cMSG_CHANNEL_CLOSE) + m2.add_int(self.remote_chanid) + self._set_closed() + # can't unlink from the Transport yet -- the remote side may still + # try to send meta-data (exit-status, etc) + return m1, m2 + + def _unlink(self): + # server connection could die before we become active: + # still signal the close! 
+        if self.closed:
+            return
+        self.lock.acquire()
+        try:
+            self._set_closed()
+            self.transport._unlink_channel(self.chanid)
+        finally:
+            self.lock.release()
+
+    def _check_add_window(self, n):
+        self.lock.acquire()
+        try:
+            if self.closed or self.eof_received or not self.active:
+                return 0
+            if self.ultra_debug:
+                self._log(DEBUG, "addwindow {}".format(n))
+            self.in_window_sofar += n
+            if self.in_window_sofar <= self.in_window_threshold:
+                return 0
+            if self.ultra_debug:
+                self._log(
+                    DEBUG, "addwindow send {}".format(self.in_window_sofar)
+                )
+            out = self.in_window_sofar
+            self.in_window_sofar = 0
+            return out
+        finally:
+            self.lock.release()
+
+    def _wait_for_send_window(self, size):
+        """
+        (You are already holding the lock.)
+        Wait for the send window to open up, and allocate up to ``size`` bytes
+        for transmission. If no space opens up before the timeout, a timeout
+        exception is raised. Returns the number of bytes available to send
+        (may be less than requested).
+        """
+        # you are already holding the lock
+        if self.closed or self.eof_sent:
+            return 0
+        if self.out_window_size == 0:
+            # should we block?
+            if self.timeout == 0.0:
+                raise socket.timeout()
+            # loop here in case we get woken up but a different thread has
+            # filled the buffer
+            timeout = self.timeout
+            while self.out_window_size == 0:
+                if self.closed or self.eof_sent:
+                    return 0
+                then = time.time()
+                self.out_buffer_cv.wait(timeout)
+                if timeout is not None:
+                    timeout -= time.time() - then
+                    if timeout <= 0.0:
+                        raise socket.timeout()
+        # we have some window to squeeze into
+        if self.closed or self.eof_sent:
+            return 0
+        if self.out_window_size < size:
+            size = self.out_window_size
+        if self.out_max_packet_size - 64 < size:
+            size = self.out_max_packet_size - 64
+        self.out_window_size -= size
+        if self.ultra_debug:
+            self._log(DEBUG, "window down to {}".format(self.out_window_size))
+        return size
+
+
+class ChannelFile(BufferedFile):
+    """
+    A file-like wrapper around `.Channel`. A ChannelFile is created by calling
+    `Channel.makefile`.
+
+    .. warning::
+        To correctly emulate the file object created from a socket's `makefile
+        <socket.socket.makefile>` method, a `.Channel` and its
+        `.ChannelFile` should be able to be closed or garbage-collected
+        independently. Currently, closing the `ChannelFile` does nothing but
+        flush the buffer.
+    """
+
+    def __init__(self, channel, mode="r", bufsize=-1):
+        self.channel = channel
+        BufferedFile.__init__(self)
+        self._set_mode(mode, bufsize)
+
+    def __repr__(self):
+        """
+        Returns a string representation of this object, for debugging.
+        """
+        return "<paramiko.ChannelFile from " + repr(self.channel) + ">"
+
+    def _read(self, size):
+        return self.channel.recv(size)
+
+    def _write(self, data):
+        self.channel.sendall(data)
+        return len(data)
+
+
+class ChannelStderrFile(ChannelFile):
+    """
+    A file-like wrapper around `.Channel` stderr.
+
+    See `Channel.makefile_stderr` for details.
+    """
+
+    def _read(self, size):
+        return self.channel.recv_stderr(size)
+
+    def _write(self, data):
+        self.channel.sendall_stderr(data)
+        return len(data)
+
+
+class ChannelStdinFile(ChannelFile):
+    """
+    A file-like wrapper around `.Channel` stdin.
+
+    See `Channel.makefile_stdin` for details.
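+
+    For example, `SSHClient.exec_command` returns an object of this type as
+    its first element (an illustrative sketch only; ``client`` is assumed to
+    be a connected `.SSHClient`)::
+
+        stdin, stdout, stderr = client.exec_command('wc -c')
+        stdin.write(b'hello')
+        stdin.close()   # also shuts down the writing side of the channel
+        print(stdout.read())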
+ """ + + def close(self): + super().close() + self.channel.shutdown_write() diff --git a/.venv/lib/python3.9/site-packages/paramiko/client.py b/.venv/lib/python3.9/site-packages/paramiko/client.py new file mode 100644 index 0000000..1f674a9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/client.py @@ -0,0 +1,889 @@ +# Copyright (C) 2006-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +SSH client & key policies +""" + +from binascii import hexlify +import getpass +import inspect +import os +import socket +import warnings +from errno import ECONNREFUSED, EHOSTUNREACH + +from paramiko.agent import Agent +from paramiko.common import DEBUG +from paramiko.config import SSH_PORT +from paramiko.ecdsakey import ECDSAKey +from paramiko.ed25519key import Ed25519Key +from paramiko.hostkeys import HostKeys +from paramiko.rsakey import RSAKey +from paramiko.ssh_exception import ( + SSHException, + BadHostKeyException, + NoValidConnectionsError, +) +from paramiko.transport import Transport +from paramiko.util import ClosingContextManager + + +class SSHClient(ClosingContextManager): + """ + A high-level representation of a session with an SSH server. This class + wraps `.Transport`, `.Channel`, and `.SFTPClient` to take care of most + aspects of authenticating and opening channels. A typical use case is:: + + client = SSHClient() + client.load_system_host_keys() + client.connect('ssh.example.com') + stdin, stdout, stderr = client.exec_command('ls -l') + + You may pass in explicit overrides for authentication and server host key + checking. The default mechanism is to try to use local key files or an + SSH agent (if one is running). + + Instances of this class may be used as context managers. + + .. versionadded:: 1.6 + """ + + def __init__(self): + """ + Create a new SSHClient. + """ + self._system_host_keys = HostKeys() + self._host_keys = HostKeys() + self._host_keys_filename = None + self._log_channel = None + self._policy = RejectPolicy() + self._transport = None + self._agent = None + + def load_system_host_keys(self, filename=None): + """ + Load host keys from a system (read-only) file. Host keys read with + this method will not be saved back by `save_host_keys`. + + This method can be called multiple times. Each new set of host keys + will be merged with the existing set (new replacing old if there are + conflicts). + + If ``filename`` is left as ``None``, an attempt will be made to read + keys from the user's local "known hosts" file, as used by OpenSSH, + and no exception will be raised if the file can't be read. This is + probably only useful on posix. 
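+
+        For example (an illustrative sketch; the explicit path is a common
+        OpenSSH location and may not exist on every system)::
+
+            client = SSHClient()
+            client.load_system_host_keys()
+            client.load_system_host_keys("/etc/ssh/ssh_known_hosts")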
+ + :param str filename: the filename to read, or ``None`` + + :raises: ``IOError`` -- + if a filename was provided and the file could not be read + """ + if filename is None: + # try the user's .ssh key file, and mask exceptions + filename = os.path.expanduser("~/.ssh/known_hosts") + try: + self._system_host_keys.load(filename) + except IOError: + pass + return + self._system_host_keys.load(filename) + + def load_host_keys(self, filename): + """ + Load host keys from a local host-key file. Host keys read with this + method will be checked after keys loaded via `load_system_host_keys`, + but will be saved back by `save_host_keys` (so they can be modified). + The missing host key policy `.AutoAddPolicy` adds keys to this set and + saves them, when connecting to a previously-unknown server. + + This method can be called multiple times. Each new set of host keys + will be merged with the existing set (new replacing old if there are + conflicts). When automatically saving, the last hostname is used. + + :param str filename: the filename to read + + :raises: ``IOError`` -- if the filename could not be read + """ + self._host_keys_filename = filename + self._host_keys.load(filename) + + def save_host_keys(self, filename): + """ + Save the host keys back to a file. Only the host keys loaded with + `load_host_keys` (plus any added directly) will be saved -- not any + host keys loaded with `load_system_host_keys`. + + :param str filename: the filename to save to + + :raises: ``IOError`` -- if the file could not be written + """ + + # update local host keys from file (in case other SSH clients + # have written to the known_hosts file meanwhile. + if self._host_keys_filename is not None: + self.load_host_keys(self._host_keys_filename) + + with open(filename, "w") as f: + for hostname, keys in self._host_keys.items(): + for keytype, key in keys.items(): + f.write( + "{} {} {}\n".format( + hostname, keytype, key.get_base64() + ) + ) + + def get_host_keys(self): + """ + Get the local `.HostKeys` object. This can be used to examine the + local host keys or change them. + + :return: the local host keys as a `.HostKeys` object. + """ + return self._host_keys + + def set_log_channel(self, name): + """ + Set the channel for logging. The default is ``"paramiko.transport"`` + but it can be set to anything you want. + + :param str name: new channel name for logging + """ + self._log_channel = name + + def set_missing_host_key_policy(self, policy): + """ + Set policy to use when connecting to servers without a known host key. + + Specifically: + + * A **policy** is a "policy class" (or instance thereof), namely some + subclass of `.MissingHostKeyPolicy` such as `.RejectPolicy` (the + default), `.AutoAddPolicy`, `.WarningPolicy`, or a user-created + subclass. + * A host key is **known** when it appears in the client object's cached + host keys structures (those manipulated by `load_system_host_keys` + and/or `load_host_keys`). + + :param .MissingHostKeyPolicy policy: + the policy to use when receiving a host key from a + previously-unknown server + """ + if inspect.isclass(policy): + policy = policy() + self._policy = policy + + def _families_and_addresses(self, hostname, port): + """ + Yield pairs of address families and addresses to try for connecting. 
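+
+        For a dual-stack host this might yield, for example (illustrative
+        values only, using documentation addresses)::
+
+            (socket.AF_INET, ("203.0.113.5", 22))
+            (socket.AF_INET6, ("2001:db8::5", 22, 0, 0))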
+ + :param str hostname: the server to connect to + :param int port: the server port to connect to + :returns: Yields an iterable of ``(family, address)`` tuples + """ + guess = True + addrinfos = socket.getaddrinfo( + hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM + ) + for (family, socktype, proto, canonname, sockaddr) in addrinfos: + if socktype == socket.SOCK_STREAM: + yield family, sockaddr + guess = False + + # some OS like AIX don't indicate SOCK_STREAM support, so just + # guess. :( We only do this if we did not get a single result marked + # as socktype == SOCK_STREAM. + if guess: + for family, _, _, _, sockaddr in addrinfos: + yield family, sockaddr + + def connect( + self, + hostname, + port=SSH_PORT, + username=None, + password=None, + pkey=None, + key_filename=None, + timeout=None, + allow_agent=True, + look_for_keys=True, + compress=False, + sock=None, + gss_auth=False, + gss_kex=False, + gss_deleg_creds=True, + gss_host=None, + banner_timeout=None, + auth_timeout=None, + channel_timeout=None, + gss_trust_dns=True, + passphrase=None, + disabled_algorithms=None, + transport_factory=None, + auth_strategy=None, + ): + """ + Connect to an SSH server and authenticate to it. The server's host key + is checked against the system host keys (see `load_system_host_keys`) + and any local host keys (`load_host_keys`). If the server's hostname + is not found in either set of host keys, the missing host key policy + is used (see `set_missing_host_key_policy`). The default policy is + to reject the key and raise an `.SSHException`. + + Authentication is attempted in the following order of priority: + + - The ``pkey`` or ``key_filename`` passed in (if any) + + - ``key_filename`` may contain OpenSSH public certificate paths + as well as regular private-key paths; when files ending in + ``-cert.pub`` are found, they are assumed to match a private + key, and both components will be loaded. (The private key + itself does *not* need to be listed in ``key_filename`` for + this to occur - *just* the certificate.) + + - Any key we can find through an SSH agent + - Any ``id_*`` keys discoverable in ``~/.ssh/`` + + - When OpenSSH-style public certificates exist that match an + existing such private key (so e.g. one has ``id_rsa`` and + ``id_rsa-cert.pub``) the certificate will be loaded alongside + the private key and used for authentication. + + - Plain username/password auth, if a password was given + + If a private key requires a password to unlock it, and a password is + passed in, that password will be used to attempt to unlock the key. + + :param str hostname: the server to connect to + :param int port: the server port to connect to + :param str username: + the username to authenticate as (defaults to the current local + username) + :param str password: + Used for password authentication; is also used for private key + decryption if ``passphrase`` is not given. + :param str passphrase: + Used for decrypting private keys. 
+ :param .PKey pkey: an optional private key to use for authentication + :param str key_filename: + the filename, or list of filenames, of optional private key(s) + and/or certs to try for authentication + :param float timeout: + an optional timeout (in seconds) for the TCP connect + :param bool allow_agent: + set to False to disable connecting to the SSH agent + :param bool look_for_keys: + set to False to disable searching for discoverable private key + files in ``~/.ssh/`` + :param bool compress: set to True to turn on compression + :param socket sock: + an open socket or socket-like object (such as a `.Channel`) to use + for communication to the target host + :param bool gss_auth: + ``True`` if you want to use GSS-API authentication + :param bool gss_kex: + Perform GSS-API Key Exchange and user authentication + :param bool gss_deleg_creds: Delegate GSS-API client credentials or not + :param str gss_host: + The targets name in the kerberos database. default: hostname + :param bool gss_trust_dns: + Indicates whether or not the DNS is trusted to securely + canonicalize the name of the host being connected to (default + ``True``). + :param float banner_timeout: an optional timeout (in seconds) to wait + for the SSH banner to be presented. + :param float auth_timeout: an optional timeout (in seconds) to wait for + an authentication response. + :param float channel_timeout: an optional timeout (in seconds) to wait + for a channel open response. + :param dict disabled_algorithms: + an optional dict passed directly to `.Transport` and its keyword + argument of the same name. + :param transport_factory: + an optional callable which is handed a subset of the constructor + arguments (primarily those related to the socket, GSS + functionality, and algorithm selection) and generates a + `.Transport` instance to be used by this client. Defaults to + `.Transport.__init__`. + :param auth_strategy: + an optional instance of `.AuthStrategy`, triggering use of this + newer authentication mechanism instead of SSHClient's legacy auth + method. + + .. warning:: + This parameter is **incompatible** with all other + authentication-related parameters (such as, but not limited to, + ``password``, ``key_filename`` and ``allow_agent``) and will + trigger an exception if given alongside them. + + :returns: + `.AuthResult` if ``auth_strategy`` is non-``None``; otherwise, + returns ``None``. + + :raises BadHostKeyException: + if the server's host key could not be verified. + :raises AuthenticationException: + if authentication failed. + :raises UnableToAuthenticate: + if authentication failed (when ``auth_strategy`` is non-``None``; + and note that this is a subclass of ``AuthenticationException``). + :raises socket.error: + if a socket error (other than connection-refused or + host-unreachable) occurred while connecting. + :raises NoValidConnectionsError: + if all valid connection targets for the requested hostname (eg IPv4 + and IPv6) yielded connection-refused or host-unreachable socket + errors. + :raises SSHException: + if there was any other error connecting or establishing an SSH + session. + + .. versionchanged:: 1.15 + Added the ``banner_timeout``, ``gss_auth``, ``gss_kex``, + ``gss_deleg_creds`` and ``gss_host`` arguments. + .. versionchanged:: 2.3 + Added the ``gss_trust_dns`` argument. + .. versionchanged:: 2.4 + Added the ``passphrase`` argument. + .. versionchanged:: 2.6 + Added the ``disabled_algorithms`` argument. + .. versionchanged:: 2.12 + Added the ``transport_factory`` argument. + .. 
versionchanged:: 3.2 + Added the ``auth_strategy`` argument. + """ + if not sock: + errors = {} + # Try multiple possible address families (e.g. IPv4 vs IPv6) + to_try = list(self._families_and_addresses(hostname, port)) + for af, addr in to_try: + try: + sock = socket.socket(af, socket.SOCK_STREAM) + if timeout is not None: + try: + sock.settimeout(timeout) + except: + pass + sock.connect(addr) + # Break out of the loop on success + break + except socket.error as e: + # As mentioned in socket docs it is better + # to close sockets explicitly + if sock: + sock.close() + # Raise anything that isn't a straight up connection error + # (such as a resolution error) + if e.errno not in (ECONNREFUSED, EHOSTUNREACH): + raise + # Capture anything else so we know how the run looks once + # iteration is complete. Retain info about which attempt + # this was. + errors[addr] = e + + # Make sure we explode usefully if no address family attempts + # succeeded. We've no way of knowing which error is the "right" + # one, so we construct a hybrid exception containing all the real + # ones, of a subclass that client code should still be watching for + # (socket.error) + if len(errors) == len(to_try): + raise NoValidConnectionsError(errors) + + if transport_factory is None: + transport_factory = Transport + t = self._transport = transport_factory( + sock, + gss_kex=gss_kex, + gss_deleg_creds=gss_deleg_creds, + disabled_algorithms=disabled_algorithms, + ) + t.use_compression(compress=compress) + t.set_gss_host( + # t.hostname may be None, but GSS-API requires a target name. + # Therefore use hostname as fallback. + gss_host=gss_host or hostname, + trust_dns=gss_trust_dns, + gssapi_requested=gss_auth or gss_kex, + ) + if self._log_channel is not None: + t.set_log_channel(self._log_channel) + if banner_timeout is not None: + t.banner_timeout = banner_timeout + if auth_timeout is not None: + t.auth_timeout = auth_timeout + if channel_timeout is not None: + t.channel_timeout = channel_timeout + + if port == SSH_PORT: + server_hostkey_name = hostname + else: + server_hostkey_name = "[{}]:{}".format(hostname, port) + our_server_keys = None + + our_server_keys = self._system_host_keys.get(server_hostkey_name) + if our_server_keys is None: + our_server_keys = self._host_keys.get(server_hostkey_name) + if our_server_keys is not None: + keytype = our_server_keys.keys()[0] + sec_opts = t.get_security_options() + other_types = [x for x in sec_opts.key_types if x != keytype] + sec_opts.key_types = [keytype] + other_types + + t.start_client(timeout=timeout) + + # If GSS-API Key Exchange is performed we are not required to check the + # host key, because the host is authenticated via GSS-API / SSPI as + # well as our client. + if not self._transport.gss_kex_used: + server_key = t.get_remote_server_key() + if our_server_keys is None: + # will raise exception if the key is rejected + self._policy.missing_host_key( + self, server_hostkey_name, server_key + ) + else: + our_key = our_server_keys.get(server_key.get_name()) + if our_key != server_key: + if our_key is None: + our_key = list(our_server_keys.values())[0] + raise BadHostKeyException(hostname, server_key, our_key) + + if username is None: + username = getpass.getuser() + + # New auth flow! + if auth_strategy is not None: + return auth_strategy.authenticate(transport=t) + + # Old auth flow! 
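+        # Normalize key_filename into a list of zero or more paths so the
+        # legacy _auth() helper can treat a single path and a list of paths
+        # the same way.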
+ if key_filename is None: + key_filenames = [] + elif isinstance(key_filename, str): + key_filenames = [key_filename] + else: + key_filenames = key_filename + + self._auth( + username, + password, + pkey, + key_filenames, + allow_agent, + look_for_keys, + gss_auth, + gss_kex, + gss_deleg_creds, + t.gss_host, + passphrase, + ) + + def close(self): + """ + Close this SSHClient and its underlying `.Transport`. + + This should be called anytime you are done using the client object. + + .. warning:: + Paramiko registers garbage collection hooks that will try to + automatically close connections for you, but this is not presently + reliable. Failure to explicitly close your client after use may + lead to end-of-process hangs! + """ + if self._transport is None: + return + self._transport.close() + self._transport = None + + if self._agent is not None: + self._agent.close() + self._agent = None + + def exec_command( + self, + command, + bufsize=-1, + timeout=None, + get_pty=False, + environment=None, + ): + """ + Execute a command on the SSH server. A new `.Channel` is opened and + the requested command is executed. The command's input and output + streams are returned as Python ``file``-like objects representing + stdin, stdout, and stderr. + + :param str command: the command to execute + :param int bufsize: + interpreted the same way as by the built-in ``file()`` function in + Python + :param int timeout: + set command's channel timeout. See `.Channel.settimeout` + :param bool get_pty: + Request a pseudo-terminal from the server (default ``False``). + See `.Channel.get_pty` + :param dict environment: + a dict of shell environment variables, to be merged into the + default environment that the remote command executes within. + + .. warning:: + Servers may silently reject some environment variables; see the + warning in `.Channel.set_environment_variable` for details. + + :return: + the stdin, stdout, and stderr of the executing command, as a + 3-tuple + + :raises: `.SSHException` -- if the server fails to execute the command + + .. versionchanged:: 1.10 + Added the ``get_pty`` kwarg. + """ + chan = self._transport.open_session(timeout=timeout) + if get_pty: + chan.get_pty() + chan.settimeout(timeout) + if environment: + chan.update_environment(environment) + chan.exec_command(command) + stdin = chan.makefile_stdin("wb", bufsize) + stdout = chan.makefile("r", bufsize) + stderr = chan.makefile_stderr("r", bufsize) + return stdin, stdout, stderr + + def invoke_shell( + self, + term="vt100", + width=80, + height=24, + width_pixels=0, + height_pixels=0, + environment=None, + ): + """ + Start an interactive shell session on the SSH server. A new `.Channel` + is opened and connected to a pseudo-terminal using the requested + terminal type and size. + + :param str term: + the terminal type to emulate (for example, ``"vt100"``) + :param int width: the width (in characters) of the terminal window + :param int height: the height (in characters) of the terminal window + :param int width_pixels: the width (in pixels) of the terminal window + :param int height_pixels: the height (in pixels) of the terminal window + :param dict environment: the command's environment + :return: a new `.Channel` connected to the remote shell + + :raises: `.SSHException` -- if the server fails to invoke a shell + """ + chan = self._transport.open_session() + chan.get_pty(term, width, height, width_pixels, height_pixels) + chan.invoke_shell() + return chan + + def open_sftp(self): + """ + Open an SFTP session on the SSH server. 
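+
+        For example, on an already-connected client (an illustrative sketch;
+        the paths are placeholders)::
+
+            sftp = client.open_sftp()
+            sftp.put("report.txt", "/tmp/report.txt")
+            sftp.close()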
+ + :return: a new `.SFTPClient` session object + """ + return self._transport.open_sftp_client() + + def get_transport(self): + """ + Return the underlying `.Transport` object for this SSH connection. + This can be used to perform lower-level tasks, like opening specific + kinds of channels. + + :return: the `.Transport` for this connection + """ + return self._transport + + def _key_from_filepath(self, filename, klass, password): + """ + Attempt to derive a `.PKey` from given string path ``filename``: + + - If ``filename`` appears to be a cert, the matching private key is + loaded. + - Otherwise, the filename is assumed to be a private key, and the + matching public cert will be loaded if it exists. + """ + cert_suffix = "-cert.pub" + # Assume privkey, not cert, by default + if filename.endswith(cert_suffix): + key_path = filename[: -len(cert_suffix)] + cert_path = filename + else: + key_path = filename + cert_path = filename + cert_suffix + # Blindly try the key path; if no private key, nothing will work. + key = klass.from_private_key_file(key_path, password) + # TODO: change this to 'Loading' instead of 'Trying' sometime; probably + # when #387 is released, since this is a critical log message users are + # likely testing/filtering for (bah.) + msg = "Trying discovered key {} in {}".format( + hexlify(key.get_fingerprint()), key_path + ) + self._log(DEBUG, msg) + # Attempt to load cert if it exists. + if os.path.isfile(cert_path): + key.load_certificate(cert_path) + self._log(DEBUG, "Adding public certificate {}".format(cert_path)) + return key + + def _auth( + self, + username, + password, + pkey, + key_filenames, + allow_agent, + look_for_keys, + gss_auth, + gss_kex, + gss_deleg_creds, + gss_host, + passphrase, + ): + """ + Try, in order: + + - The key(s) passed in, if one was passed in. + - Any key we can find through an SSH agent (if allowed). + - Any id_* key discoverable in ~/.ssh/ (if allowed). + - Plain username/password auth, if a password was given. + + (The password might be needed to unlock a private key [if 'passphrase' + isn't also given], or for two-factor authentication [for which it is + required].) + """ + saved_exception = None + two_factor = False + allowed_types = set() + two_factor_types = {"keyboard-interactive", "password"} + if passphrase is None and password is not None: + passphrase = password + + # If GSS-API support and GSS-PI Key Exchange was performed, we attempt + # authentication with gssapi-keyex. + if gss_kex and self._transport.gss_kex_used: + try: + self._transport.auth_gssapi_keyex(username) + return + except Exception as e: + saved_exception = e + + # Try GSS-API authentication (gssapi-with-mic) only if GSS-API Key + # Exchange is not performed, because if we use GSS-API for the key + # exchange, there is already a fully established GSS-API context, so + # why should we do that again? 
+ if gss_auth: + try: + return self._transport.auth_gssapi_with_mic( + username, gss_host, gss_deleg_creds + ) + except Exception as e: + saved_exception = e + + if pkey is not None: + try: + self._log( + DEBUG, + "Trying SSH key {}".format( + hexlify(pkey.get_fingerprint()) + ), + ) + allowed_types = set( + self._transport.auth_publickey(username, pkey) + ) + two_factor = allowed_types & two_factor_types + if not two_factor: + return + except SSHException as e: + saved_exception = e + + if not two_factor: + for key_filename in key_filenames: + # TODO 4.0: leverage PKey.from_path() if we don't end up just + # killing SSHClient entirely + for pkey_class in (RSAKey, ECDSAKey, Ed25519Key): + try: + key = self._key_from_filepath( + key_filename, pkey_class, passphrase + ) + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types + if not two_factor: + return + break + except SSHException as e: + saved_exception = e + + if not two_factor and allow_agent: + if self._agent is None: + self._agent = Agent() + + for key in self._agent.get_keys(): + try: + id_ = hexlify(key.get_fingerprint()) + self._log(DEBUG, "Trying SSH agent key {}".format(id_)) + # for 2-factor auth a successfully auth'd key password + # will return an allowed 2fac auth method + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types + if not two_factor: + return + break + except SSHException as e: + saved_exception = e + + if not two_factor: + keyfiles = [] + + for keytype, name in [ + (RSAKey, "rsa"), + (ECDSAKey, "ecdsa"), + (Ed25519Key, "ed25519"), + ]: + # ~/ssh/ is for windows + for directory in [".ssh", "ssh"]: + full_path = os.path.expanduser( + "~/{}/id_{}".format(directory, name) + ) + if os.path.isfile(full_path): + # TODO: only do this append if below did not run + keyfiles.append((keytype, full_path)) + if os.path.isfile(full_path + "-cert.pub"): + keyfiles.append((keytype, full_path + "-cert.pub")) + + if not look_for_keys: + keyfiles = [] + + for pkey_class, filename in keyfiles: + try: + key = self._key_from_filepath( + filename, pkey_class, passphrase + ) + # for 2-factor auth a successfully auth'd key will result + # in ['password'] + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types + if not two_factor: + return + break + except (SSHException, IOError) as e: + saved_exception = e + + if password is not None: + try: + self._transport.auth_password(username, password) + return + except SSHException as e: + saved_exception = e + elif two_factor: + try: + self._transport.auth_interactive_dumb(username) + return + except SSHException as e: + saved_exception = e + + # if we got an auth-failed exception earlier, re-raise it + if saved_exception is not None: + raise saved_exception + raise SSHException("No authentication methods available") + + def _log(self, level, msg): + self._transport._log(level, msg) + + +class MissingHostKeyPolicy: + """ + Interface for defining the policy that `.SSHClient` should use when the + SSH server's hostname is not in either the system host keys or the + application's keys. Pre-made classes implement policies for automatically + adding the key to the application's `.HostKeys` object (`.AutoAddPolicy`), + and for automatically rejecting the key (`.RejectPolicy`). + + This function may be used to ask the user to verify the key, for example. 
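+
+    A user-defined policy is simply a subclass that overrides
+    ``missing_host_key``; the hypothetical ``PromptPolicy`` below is an
+    illustrative sketch only::
+
+        class PromptPolicy(MissingHostKeyPolicy):
+            def missing_host_key(self, client, hostname, key):
+                answer = input("Trust host {}? [y/N] ".format(hostname))
+                if answer.lower() != "y":
+                    raise SSHException("Rejected host key for " + hostname)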
+    """
+
+    def missing_host_key(self, client, hostname, key):
+        """
+        Called when an `.SSHClient` receives a server key for a server that
+        isn't in either the system or local `.HostKeys` object. To accept
+        the key, simply return. To reject, raise an exception (which will
+        be passed to the calling application).
+        """
+        pass
+
+
+class AutoAddPolicy(MissingHostKeyPolicy):
+    """
+    Policy for automatically adding the hostname and new host key to the
+    local `.HostKeys` object, and saving it. This is used by `.SSHClient`.
+    """
+
+    def missing_host_key(self, client, hostname, key):
+        client._host_keys.add(hostname, key.get_name(), key)
+        if client._host_keys_filename is not None:
+            client.save_host_keys(client._host_keys_filename)
+        client._log(
+            DEBUG,
+            "Adding {} host key for {}: {}".format(
+                key.get_name(), hostname, hexlify(key.get_fingerprint())
+            ),
+        )
+
+
+class RejectPolicy(MissingHostKeyPolicy):
+    """
+    Policy for automatically rejecting the unknown hostname & key. This is
+    used by `.SSHClient`.
+    """
+
+    def missing_host_key(self, client, hostname, key):
+        client._log(
+            DEBUG,
+            "Rejecting {} host key for {}: {}".format(
+                key.get_name(), hostname, hexlify(key.get_fingerprint())
+            ),
+        )
+        raise SSHException(
+            "Server {!r} not found in known_hosts".format(hostname)
+        )
+
+
+class WarningPolicy(MissingHostKeyPolicy):
+    """
+    Policy for logging a Python-style warning for an unknown host key, but
+    accepting it. This is used by `.SSHClient`.
+    """
+
+    def missing_host_key(self, client, hostname, key):
+        warnings.warn(
+            "Unknown {} host key for {}: {}".format(
+                key.get_name(), hostname, hexlify(key.get_fingerprint())
+            )
+        )
diff --git a/.venv/lib/python3.9/site-packages/paramiko/common.py b/.venv/lib/python3.9/site-packages/paramiko/common.py
new file mode 100644
index 0000000..b57149b
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/paramiko/common.py
@@ -0,0 +1,245 @@
+# Copyright (C) 2003-2007 Robey Pointer
+#
+# This file is part of paramiko.
+#
+# Paramiko is free software; you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation; either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""
+Common constants and global variables.
+"""
+import logging
+import struct
+
+#
+# Formerly of py3compat.py. May be fully delete'able with a deeper look?
+#
+
+
+def byte_chr(c):
+    assert isinstance(c, int)
+    return struct.pack("B", c)
+
+
+def byte_mask(c, mask):
+    assert isinstance(c, int)
+    return struct.pack("B", c & mask)
+
+
+def byte_ord(c):
+    # In case we're handed a string instead of an int.
+ if not isinstance(c, int): + c = ord(c) + return c + + +( + MSG_DISCONNECT, + MSG_IGNORE, + MSG_UNIMPLEMENTED, + MSG_DEBUG, + MSG_SERVICE_REQUEST, + MSG_SERVICE_ACCEPT, + MSG_EXT_INFO, +) = range(1, 8) +(MSG_KEXINIT, MSG_NEWKEYS) = range(20, 22) +( + MSG_USERAUTH_REQUEST, + MSG_USERAUTH_FAILURE, + MSG_USERAUTH_SUCCESS, + MSG_USERAUTH_BANNER, +) = range(50, 54) +MSG_USERAUTH_PK_OK = 60 +(MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE) = range(60, 62) +(MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN) = range(60, 62) +( + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, + MSG_USERAUTH_GSSAPI_ERROR, + MSG_USERAUTH_GSSAPI_ERRTOK, + MSG_USERAUTH_GSSAPI_MIC, +) = range(63, 67) +HIGHEST_USERAUTH_MESSAGE_ID = 79 +(MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE) = range(80, 83) +( + MSG_CHANNEL_OPEN, + MSG_CHANNEL_OPEN_SUCCESS, + MSG_CHANNEL_OPEN_FAILURE, + MSG_CHANNEL_WINDOW_ADJUST, + MSG_CHANNEL_DATA, + MSG_CHANNEL_EXTENDED_DATA, + MSG_CHANNEL_EOF, + MSG_CHANNEL_CLOSE, + MSG_CHANNEL_REQUEST, + MSG_CHANNEL_SUCCESS, + MSG_CHANNEL_FAILURE, +) = range(90, 101) + +cMSG_DISCONNECT = byte_chr(MSG_DISCONNECT) +cMSG_IGNORE = byte_chr(MSG_IGNORE) +cMSG_UNIMPLEMENTED = byte_chr(MSG_UNIMPLEMENTED) +cMSG_DEBUG = byte_chr(MSG_DEBUG) +cMSG_SERVICE_REQUEST = byte_chr(MSG_SERVICE_REQUEST) +cMSG_SERVICE_ACCEPT = byte_chr(MSG_SERVICE_ACCEPT) +cMSG_EXT_INFO = byte_chr(MSG_EXT_INFO) +cMSG_KEXINIT = byte_chr(MSG_KEXINIT) +cMSG_NEWKEYS = byte_chr(MSG_NEWKEYS) +cMSG_USERAUTH_REQUEST = byte_chr(MSG_USERAUTH_REQUEST) +cMSG_USERAUTH_FAILURE = byte_chr(MSG_USERAUTH_FAILURE) +cMSG_USERAUTH_SUCCESS = byte_chr(MSG_USERAUTH_SUCCESS) +cMSG_USERAUTH_BANNER = byte_chr(MSG_USERAUTH_BANNER) +cMSG_USERAUTH_PK_OK = byte_chr(MSG_USERAUTH_PK_OK) +cMSG_USERAUTH_INFO_REQUEST = byte_chr(MSG_USERAUTH_INFO_REQUEST) +cMSG_USERAUTH_INFO_RESPONSE = byte_chr(MSG_USERAUTH_INFO_RESPONSE) +cMSG_USERAUTH_GSSAPI_RESPONSE = byte_chr(MSG_USERAUTH_GSSAPI_RESPONSE) +cMSG_USERAUTH_GSSAPI_TOKEN = byte_chr(MSG_USERAUTH_GSSAPI_TOKEN) +cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = byte_chr( + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE +) +cMSG_USERAUTH_GSSAPI_ERROR = byte_chr(MSG_USERAUTH_GSSAPI_ERROR) +cMSG_USERAUTH_GSSAPI_ERRTOK = byte_chr(MSG_USERAUTH_GSSAPI_ERRTOK) +cMSG_USERAUTH_GSSAPI_MIC = byte_chr(MSG_USERAUTH_GSSAPI_MIC) +cMSG_GLOBAL_REQUEST = byte_chr(MSG_GLOBAL_REQUEST) +cMSG_REQUEST_SUCCESS = byte_chr(MSG_REQUEST_SUCCESS) +cMSG_REQUEST_FAILURE = byte_chr(MSG_REQUEST_FAILURE) +cMSG_CHANNEL_OPEN = byte_chr(MSG_CHANNEL_OPEN) +cMSG_CHANNEL_OPEN_SUCCESS = byte_chr(MSG_CHANNEL_OPEN_SUCCESS) +cMSG_CHANNEL_OPEN_FAILURE = byte_chr(MSG_CHANNEL_OPEN_FAILURE) +cMSG_CHANNEL_WINDOW_ADJUST = byte_chr(MSG_CHANNEL_WINDOW_ADJUST) +cMSG_CHANNEL_DATA = byte_chr(MSG_CHANNEL_DATA) +cMSG_CHANNEL_EXTENDED_DATA = byte_chr(MSG_CHANNEL_EXTENDED_DATA) +cMSG_CHANNEL_EOF = byte_chr(MSG_CHANNEL_EOF) +cMSG_CHANNEL_CLOSE = byte_chr(MSG_CHANNEL_CLOSE) +cMSG_CHANNEL_REQUEST = byte_chr(MSG_CHANNEL_REQUEST) +cMSG_CHANNEL_SUCCESS = byte_chr(MSG_CHANNEL_SUCCESS) +cMSG_CHANNEL_FAILURE = byte_chr(MSG_CHANNEL_FAILURE) + +# for debugging: +MSG_NAMES = { + MSG_DISCONNECT: "disconnect", + MSG_IGNORE: "ignore", + MSG_UNIMPLEMENTED: "unimplemented", + MSG_DEBUG: "debug", + MSG_SERVICE_REQUEST: "service-request", + MSG_SERVICE_ACCEPT: "service-accept", + MSG_KEXINIT: "kexinit", + MSG_EXT_INFO: "ext-info", + MSG_NEWKEYS: "newkeys", + 30: "kex30", + 31: "kex31", + 32: "kex32", + 33: "kex33", + 34: "kex34", + 40: "kex40", + 41: "kex41", + MSG_USERAUTH_REQUEST: "userauth-request", + 
MSG_USERAUTH_FAILURE: "userauth-failure", + MSG_USERAUTH_SUCCESS: "userauth-success", + MSG_USERAUTH_BANNER: "userauth--banner", + MSG_USERAUTH_PK_OK: "userauth-60(pk-ok/info-request)", + MSG_USERAUTH_INFO_RESPONSE: "userauth-info-response", + MSG_GLOBAL_REQUEST: "global-request", + MSG_REQUEST_SUCCESS: "request-success", + MSG_REQUEST_FAILURE: "request-failure", + MSG_CHANNEL_OPEN: "channel-open", + MSG_CHANNEL_OPEN_SUCCESS: "channel-open-success", + MSG_CHANNEL_OPEN_FAILURE: "channel-open-failure", + MSG_CHANNEL_WINDOW_ADJUST: "channel-window-adjust", + MSG_CHANNEL_DATA: "channel-data", + MSG_CHANNEL_EXTENDED_DATA: "channel-extended-data", + MSG_CHANNEL_EOF: "channel-eof", + MSG_CHANNEL_CLOSE: "channel-close", + MSG_CHANNEL_REQUEST: "channel-request", + MSG_CHANNEL_SUCCESS: "channel-success", + MSG_CHANNEL_FAILURE: "channel-failure", + MSG_USERAUTH_GSSAPI_RESPONSE: "userauth-gssapi-response", + MSG_USERAUTH_GSSAPI_TOKEN: "userauth-gssapi-token", + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: "userauth-gssapi-exchange-complete", + MSG_USERAUTH_GSSAPI_ERROR: "userauth-gssapi-error", + MSG_USERAUTH_GSSAPI_ERRTOK: "userauth-gssapi-error-token", + MSG_USERAUTH_GSSAPI_MIC: "userauth-gssapi-mic", +} + + +# authentication request return codes: +AUTH_SUCCESSFUL, AUTH_PARTIALLY_SUCCESSFUL, AUTH_FAILED = range(3) + + +# channel request failed reasons: +( + OPEN_SUCCEEDED, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_FAILED_CONNECT_FAILED, + OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, + OPEN_FAILED_RESOURCE_SHORTAGE, +) = range(0, 5) + + +CONNECTION_FAILED_CODE = { + 1: "Administratively prohibited", + 2: "Connect failed", + 3: "Unknown channel type", + 4: "Resource shortage", +} + + +( + DISCONNECT_SERVICE_NOT_AVAILABLE, + DISCONNECT_AUTH_CANCELLED_BY_USER, + DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE, +) = (7, 13, 14) + +zero_byte = byte_chr(0) +one_byte = byte_chr(1) +four_byte = byte_chr(4) +max_byte = byte_chr(0xFF) +cr_byte = byte_chr(13) +linefeed_byte = byte_chr(10) +crlf = cr_byte + linefeed_byte +cr_byte_value = 13 +linefeed_byte_value = 10 + + +xffffffff = 0xFFFFFFFF +x80000000 = 0x80000000 +o666 = 438 +o660 = 432 +o644 = 420 +o600 = 384 +o777 = 511 +o700 = 448 +o70 = 56 + +DEBUG = logging.DEBUG +INFO = logging.INFO +WARNING = logging.WARNING +ERROR = logging.ERROR +CRITICAL = logging.CRITICAL + +# Common IO/select/etc sleep period, in seconds +io_sleep = 0.01 + +DEFAULT_WINDOW_SIZE = 64 * 2**15 +DEFAULT_MAX_PACKET_SIZE = 2**15 + +# lower bound on the max packet size we'll accept from the remote host +# Minimum packet size is 32768 bytes according to +# http://www.ietf.org/rfc/rfc4254.txt +MIN_WINDOW_SIZE = 2**15 + +# However, according to http://www.ietf.org/rfc/rfc4253.txt it is perfectly +# legal to accept a size much smaller, as OpenSSH client does as size 16384. +MIN_PACKET_SIZE = 2**12 + +# Max windows size according to http://www.ietf.org/rfc/rfc4254.txt +MAX_WINDOW_SIZE = 2**32 - 1 diff --git a/.venv/lib/python3.9/site-packages/paramiko/compress.py b/.venv/lib/python3.9/site-packages/paramiko/compress.py new file mode 100644 index 0000000..18ff484 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/compress.py @@ -0,0 +1,40 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. 
+# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Compression implementations for a Transport. +""" + +import zlib + + +class ZlibCompressor: + def __init__(self): + # Use the default level of zlib compression + self.z = zlib.compressobj() + + def __call__(self, data): + return self.z.compress(data) + self.z.flush(zlib.Z_FULL_FLUSH) + + +class ZlibDecompressor: + def __init__(self): + self.z = zlib.decompressobj() + + def __call__(self, data): + return self.z.decompress(data) diff --git a/.venv/lib/python3.9/site-packages/paramiko/config.py b/.venv/lib/python3.9/site-packages/paramiko/config.py new file mode 100644 index 0000000..8ab55c6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/config.py @@ -0,0 +1,696 @@ +# Copyright (C) 2006-2007 Robey Pointer +# Copyright (C) 2012 Olle Lundberg +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Configuration file (aka ``ssh_config``) support. +""" + +import fnmatch +import getpass +import os +import re +import shlex +import socket +from hashlib import sha1 +from io import StringIO +from functools import partial + +invoke, invoke_import_error = None, None +try: + import invoke +except ImportError as e: + invoke_import_error = e + +from .ssh_exception import CouldNotCanonicalize, ConfigParseError + + +SSH_PORT = 22 + + +class SSHConfig: + """ + Representation of config information as stored in the format used by + OpenSSH. Queries can be made via `lookup`. The format is described in + OpenSSH's ``ssh_config`` man page. This class is provided primarily as a + convenience to posix users (since the OpenSSH format is a de-facto + standard on posix) but should work fine on Windows too. + + .. versionadded:: 1.6 + """ + + SETTINGS_REGEX = re.compile(r"(\w+)(?:\s*=\s*|\s+)(.+)") + + # TODO: do a full scan of ssh.c & friends to make sure we're fully + # compatible across the board, e.g. OpenSSH 8.1 added %n to ProxyCommand. + TOKENS_BY_CONFIG_KEY = { + "controlpath": ["%C", "%h", "%l", "%L", "%n", "%p", "%r", "%u"], + "hostname": ["%h"], + "identityfile": ["%C", "~", "%d", "%h", "%l", "%u", "%r"], + "proxycommand": ["~", "%h", "%p", "%r"], + "proxyjump": ["%h", "%p", "%r"], + # Doesn't seem worth making this 'special' for now, it will fit well + # enough (no actual match-exec config key to be confused with). 
+ "match-exec": ["%C", "%d", "%h", "%L", "%l", "%n", "%p", "%r", "%u"], + } + + def __init__(self): + """ + Create a new OpenSSH config object. + + Note: the newer alternate constructors `from_path`, `from_file` and + `from_text` are simpler to use, as they parse on instantiation. For + example, instead of:: + + config = SSHConfig() + config.parse(open("some-path.config") + + you could:: + + config = SSHConfig.from_file(open("some-path.config")) + # Or more directly: + config = SSHConfig.from_path("some-path.config") + # Or if you have arbitrary ssh_config text from some other source: + config = SSHConfig.from_text("Host foo\\n\\tUser bar") + """ + self._config = [] + + @classmethod + def from_text(cls, text): + """ + Create a new, parsed `SSHConfig` from ``text`` string. + + .. versionadded:: 2.7 + """ + return cls.from_file(StringIO(text)) + + @classmethod + def from_path(cls, path): + """ + Create a new, parsed `SSHConfig` from the file found at ``path``. + + .. versionadded:: 2.7 + """ + with open(path) as flo: + return cls.from_file(flo) + + @classmethod + def from_file(cls, flo): + """ + Create a new, parsed `SSHConfig` from file-like object ``flo``. + + .. versionadded:: 2.7 + """ + obj = cls() + obj.parse(flo) + return obj + + def parse(self, file_obj): + """ + Read an OpenSSH config from the given file object. + + :param file_obj: a file-like object to read the config file from + """ + # Start out w/ implicit/anonymous global host-like block to hold + # anything not contained by an explicit one. + context = {"host": ["*"], "config": {}} + for line in file_obj: + # Strip any leading or trailing whitespace from the line. + # Refer to https://github.com/paramiko/paramiko/issues/499 + line = line.strip() + # Skip blanks, comments + if not line or line.startswith("#"): + continue + + # Parse line into key, value + match = re.match(self.SETTINGS_REGEX, line) + if not match: + raise ConfigParseError("Unparsable line {}".format(line)) + key = match.group(1).lower() + value = match.group(2) + + # Host keyword triggers switch to new block/context + if key in ("host", "match"): + self._config.append(context) + context = {"config": {}} + if key == "host": + # TODO 4.0: make these real objects or at least name this + # "hosts" to acknowledge it's an iterable. (Doing so prior + # to 3.0, despite it being a private API, feels bad - + # surely such an old codebase has folks actually relying on + # these keys.) + context["host"] = self._get_hosts(value) + else: + context["matches"] = self._get_matches(value) + # Special-case for noop ProxyCommands + elif key == "proxycommand" and value.lower() == "none": + # Store 'none' as None - not as a string implying that the + # proxycommand is the literal shell command "none"! + context["config"][key] = None + # All other keywords get stored, directly or via append + else: + if value.startswith('"') and value.endswith('"'): + value = value[1:-1] + + # identityfile, localforward, remoteforward keys are special + # cases, since they are allowed to be specified multiple times + # and they should be tried in order of specification. + if key in ["identityfile", "localforward", "remoteforward"]: + if key in context["config"]: + context["config"][key].append(value) + else: + context["config"][key] = [value] + elif key not in context["config"]: + context["config"][key] = value + # Store last 'open' block and we're done + self._config.append(context) + + def lookup(self, hostname): + """ + Return a dict (`SSHConfigDict`) of config options for a given hostname. 
+ + The host-matching rules of OpenSSH's ``ssh_config`` man page are used: + For each parameter, the first obtained value will be used. The + configuration files contain sections separated by ``Host`` and/or + ``Match`` specifications, and that section is only applied for hosts + which match the given patterns or keywords + + Since the first obtained value for each parameter is used, more host- + specific declarations should be given near the beginning of the file, + and general defaults at the end. + + The keys in the returned dict are all normalized to lowercase (look for + ``"port"``, not ``"Port"``. The values are processed according to the + rules for substitution variable expansion in ``ssh_config``. + + Finally, please see the docs for `SSHConfigDict` for deeper info on + features such as optional type conversion methods, e.g.:: + + conf = my_config.lookup('myhost') + assert conf['passwordauthentication'] == 'yes' + assert conf.as_bool('passwordauthentication') is True + + .. note:: + If there is no explicitly configured ``HostName`` value, it will be + set to the being-looked-up hostname, which is as close as we can + get to OpenSSH's behavior around that particular option. + + :param str hostname: the hostname to lookup + + .. versionchanged:: 2.5 + Returns `SSHConfigDict` objects instead of dict literals. + .. versionchanged:: 2.7 + Added canonicalization support. + .. versionchanged:: 2.7 + Added ``Match`` support. + .. versionchanged:: 3.3 + Added ``Match final`` support. + """ + # First pass + options = self._lookup(hostname=hostname) + # Inject HostName if it was not set (this used to be done incidentally + # during tokenization, for some reason). + if "hostname" not in options: + options["hostname"] = hostname + # Handle canonicalization + canon = options.get("canonicalizehostname", None) in ("yes", "always") + maxdots = int(options.get("canonicalizemaxdots", 1)) + if canon and hostname.count(".") <= maxdots: + # NOTE: OpenSSH manpage does not explicitly state this, but its + # implementation for CanonicalDomains is 'split on any whitespace'. + domains = options["canonicaldomains"].split() + hostname = self.canonicalize(hostname, options, domains) + # Overwrite HostName again here (this is also what OpenSSH does) + options["hostname"] = hostname + options = self._lookup( + hostname, options, canonical=True, final=True + ) + else: + options = self._lookup( + hostname, options, canonical=False, final=True + ) + return options + + def _lookup(self, hostname, options=None, canonical=False, final=False): + # Init + if options is None: + options = SSHConfigDict() + # Iterate all stanzas, applying any that match, in turn (so that things + # like Match can reference currently understood state) + for context in self._config: + if not ( + self._pattern_matches(context.get("host", []), hostname) + or self._does_match( + context.get("matches", []), + hostname, + canonical, + final, + options, + ) + ): + continue + for key, value in context["config"].items(): + if key not in options: + # Create a copy of the original value, + # else it will reference the original list + # in self._config and update that value too + # when the extend() is being called. 
+ options[key] = value[:] if value is not None else value + elif key == "identityfile": + options[key].extend( + x for x in value if x not in options[key] + ) + if final: + # Expand variables in resulting values + # (besides 'Match exec' which was already handled above) + options = self._expand_variables(options, hostname) + return options + + def canonicalize(self, hostname, options, domains): + """ + Return canonicalized version of ``hostname``. + + :param str hostname: Target hostname. + :param options: An `SSHConfigDict` from a previous lookup pass. + :param domains: List of domains (e.g. ``["paramiko.org"]``). + + :returns: A canonicalized hostname if one was found, else ``None``. + + .. versionadded:: 2.7 + """ + found = False + for domain in domains: + candidate = "{}.{}".format(hostname, domain) + family_specific = _addressfamily_host_lookup(candidate, options) + if family_specific is not None: + # TODO: would we want to dig deeper into other results? e.g. to + # find something that satisfies PermittedCNAMEs when that is + # implemented? + found = family_specific[0] + else: + # TODO: what does ssh use here and is there a reason to use + # that instead of gethostbyname? + try: + found = socket.gethostbyname(candidate) + except socket.gaierror: + pass + if found: + # TODO: follow CNAME (implied by found != candidate?) if + # CanonicalizePermittedCNAMEs allows it + return candidate + # If we got here, it means canonicalization failed. + # When CanonicalizeFallbackLocal is undefined or 'yes', we just spit + # back the original hostname. + if options.get("canonicalizefallbacklocal", "yes") == "yes": + return hostname + # And here, we failed AND fallback was set to a non-yes value, so we + # need to get mad. + raise CouldNotCanonicalize(hostname) + + def get_hostnames(self): + """ + Return the set of literal hostnames defined in the SSH config (both + explicit hostnames and wildcard entries). + """ + hosts = set() + for entry in self._config: + hosts.update(entry["host"]) + return hosts + + def _pattern_matches(self, patterns, target): + # Convenience auto-splitter if not already a list + if hasattr(patterns, "split"): + patterns = patterns.split(",") + match = False + for pattern in patterns: + # Short-circuit if target matches a negated pattern + if pattern.startswith("!") and fnmatch.fnmatch( + target, pattern[1:] + ): + return False + # Flag a match, but continue (in case of later negation) if regular + # match occurs + elif fnmatch.fnmatch(target, pattern): + match = True + return match + + def _does_match( + self, match_list, target_hostname, canonical, final, options + ): + matched = [] + candidates = match_list[:] + local_username = getpass.getuser() + while candidates: + candidate = candidates.pop(0) + passed = None + # Obtain latest host/user value every loop, so later Match may + # reference values assigned within a prior Match. + configured_host = options.get("hostname", None) + configured_user = options.get("user", None) + type_, param = candidate["type"], candidate["param"] + # Canonical is a hard pass/fail based on whether this is a + # canonicalized re-lookup. + if type_ == "canonical": + if self._should_fail(canonical, candidate): + return False + if type_ == "final": + passed = final + # The parse step ensures we only see this by itself or after + # canonical, so it's also an easy hard pass. (No negation here as + # that would be uh, pretty weird?) 
+ elif type_ == "all": + return True + # From here, we are testing various non-hard criteria, + # short-circuiting only on fail + elif type_ == "host": + hostval = configured_host or target_hostname + passed = self._pattern_matches(param, hostval) + elif type_ == "originalhost": + passed = self._pattern_matches(param, target_hostname) + elif type_ == "user": + user = configured_user or local_username + passed = self._pattern_matches(param, user) + elif type_ == "localuser": + passed = self._pattern_matches(param, local_username) + elif type_ == "exec": + exec_cmd = self._tokenize( + options, target_hostname, "match-exec", param + ) + # This is the laziest spot in which we can get mad about an + # inability to import Invoke. + if invoke is None: + raise invoke_import_error + # Like OpenSSH, we 'redirect' stdout but let stderr bubble up + passed = invoke.run(exec_cmd, hide="stdout", warn=True).ok + # Tackle any 'passed, but was negated' results from above + if passed is not None and self._should_fail(passed, candidate): + return False + # Made it all the way here? Everything matched! + matched.append(candidate) + # Did anything match? (To be treated as bool, usually.) + return matched + + def _should_fail(self, would_pass, candidate): + return would_pass if candidate["negate"] else not would_pass + + def _tokenize(self, config, target_hostname, key, value): + """ + Tokenize a string based on current config/hostname data. + + :param config: Current config data. + :param target_hostname: Original target connection hostname. + :param key: Config key being tokenized (used to filter token list). + :param value: Config value being tokenized. + + :returns: The tokenized version of the input ``value`` string. + """ + allowed_tokens = self._allowed_tokens(key) + # Short-circuit if no tokenization possible + if not allowed_tokens: + return value + # Obtain potentially configured hostname, for use with %h. + # Special-case where we are tokenizing the hostname itself, to avoid + # replacing %h with a %h-bearing value, etc. + configured_hostname = target_hostname + if key != "hostname": + configured_hostname = config.get("hostname", configured_hostname) + # Ditto the rest of the source values + if "port" in config: + port = config["port"] + else: + port = SSH_PORT + user = getpass.getuser() + if "user" in config: + remoteuser = config["user"] + else: + remoteuser = user + local_hostname = socket.gethostname().split(".")[0] + local_fqdn = LazyFqdn(config, local_hostname) + homedir = os.path.expanduser("~") + tohash = local_hostname + target_hostname + repr(port) + remoteuser + # The actual tokens! + replacements = { + # TODO: %%??? + "%C": sha1(tohash.encode()).hexdigest(), + "%d": homedir, + "%h": configured_hostname, + # TODO: %i? + "%L": local_hostname, + "%l": local_fqdn, + # also this is pseudo buggy when not in Match exec mode so document + # that. also WHY is that the case?? don't we do all of this late? + "%n": target_hostname, + "%p": port, + "%r": remoteuser, + # TODO: %T? don't believe this is possible however + "%u": user, + "~": homedir, + } + # Do the thing with the stuff + tokenized = value + for find, replace in replacements.items(): + if find not in allowed_tokens: + continue + tokenized = tokenized.replace(find, str(replace)) + # TODO: log? eg that value -> tokenized + return tokenized + + def _allowed_tokens(self, key): + """ + Given config ``key``, return list of token strings to tokenize. + + .. 
note:: + This feels like it wants to eventually go away, but is used to + preserve as-strict-as-possible compatibility with OpenSSH, which + for whatever reason only applies some tokens to some config keys. + """ + return self.TOKENS_BY_CONFIG_KEY.get(key, []) + + def _expand_variables(self, config, target_hostname): + """ + Return a dict of config options with expanded substitutions + for a given original & current target hostname. + + Please refer to :doc:`/api/config` for details. + + :param dict config: the currently parsed config + :param str hostname: the hostname whose config is being looked up + """ + for k in config: + if config[k] is None: + continue + tokenizer = partial(self._tokenize, config, target_hostname, k) + if isinstance(config[k], list): + for i, value in enumerate(config[k]): + config[k][i] = tokenizer(value) + else: + config[k] = tokenizer(config[k]) + return config + + def _get_hosts(self, host): + """ + Return a list of host_names from host value. + """ + try: + return shlex.split(host) + except ValueError: + raise ConfigParseError("Unparsable host {}".format(host)) + + def _get_matches(self, match): + """ + Parse a specific Match config line into a list-of-dicts for its values. + + Performs some parse-time validation as well. + """ + matches = [] + tokens = shlex.split(match) + while tokens: + match = {"type": None, "param": None, "negate": False} + type_ = tokens.pop(0) + # Handle per-keyword negation + if type_.startswith("!"): + match["negate"] = True + type_ = type_[1:] + match["type"] = type_ + # all/canonical have no params (everything else does) + if type_ in ("all", "canonical", "final"): + matches.append(match) + continue + if not tokens: + raise ConfigParseError( + "Missing parameter to Match '{}' keyword".format(type_) + ) + match["param"] = tokens.pop(0) + matches.append(match) + # Perform some (easier to do now than in the middle) validation that is + # better handled here than at lookup time. + keywords = [x["type"] for x in matches] + if "all" in keywords: + allowable = ("all", "canonical") + ok, bad = ( + list(filter(lambda x: x in allowable, keywords)), + list(filter(lambda x: x not in allowable, keywords)), + ) + err = None + if any(bad): + err = "Match does not allow 'all' mixed with anything but 'canonical'" # noqa + elif "canonical" in ok and ok.index("canonical") > ok.index("all"): + err = "Match does not allow 'all' before 'canonical'" + if err is not None: + raise ConfigParseError(err) + return matches + + +def _addressfamily_host_lookup(hostname, options): + """ + Try looking up ``hostname`` in an IPv4 or IPv6 specific manner. + + This is an odd duck due to needing use in two divergent use cases. It looks + up ``AddressFamily`` in ``options`` and if it is ``inet`` or ``inet6``, + this function uses `socket.getaddrinfo` to perform a family-specific + lookup, returning the result if successful. + + In any other situation -- lookup failure, or ``AddressFamily`` being + unspecified or ``any`` -- ``None`` is returned instead and the caller is + expected to do something situation-appropriate like calling + `socket.gethostbyname`. + + :param str hostname: Hostname to look up. + :param options: `SSHConfigDict` instance w/ parsed options. + :returns: ``getaddrinfo``-style tuples, or ``None``, depending. 
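+
+    Illustrative usage (the hostname and any returned tuples are examples
+    only; real results depend on the local resolver)::
+
+        opts = SSHConfigDict({"addressfamily": "inet"})
+        results = _addressfamily_host_lookup("host.example.com", opts)
+        if results is not None:
+            family, socktype, proto, canonname, sockaddr = results[0]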
+ """ + address_family = options.get("addressfamily", "any").lower() + if address_family == "any": + return + try: + family = socket.AF_INET6 + if address_family == "inet": + family = socket.AF_INET + return socket.getaddrinfo( + hostname, + None, + family, + socket.SOCK_DGRAM, + socket.IPPROTO_IP, + socket.AI_CANONNAME, + ) + except socket.gaierror: + pass + + +class LazyFqdn: + """ + Returns the host's fqdn on request as string. + """ + + def __init__(self, config, host=None): + self.fqdn = None + self.config = config + self.host = host + + def __str__(self): + if self.fqdn is None: + # + # If the SSH config contains AddressFamily, use that when + # determining the local host's FQDN. Using socket.getfqdn() from + # the standard library is the most general solution, but can + # result in noticeable delays on some platforms when IPv6 is + # misconfigured or not available, as it calls getaddrinfo with no + # address family specified, so both IPv4 and IPv6 are checked. + # + + # Handle specific option + fqdn = None + results = _addressfamily_host_lookup(self.host, self.config) + if results is not None: + for res in results: + af, socktype, proto, canonname, sa = res + if canonname and "." in canonname: + fqdn = canonname + break + # Handle 'any' / unspecified / lookup failure + if fqdn is None: + fqdn = socket.getfqdn() + # Cache + self.fqdn = fqdn + return self.fqdn + + +class SSHConfigDict(dict): + """ + A dictionary wrapper/subclass for per-host configuration structures. + + This class introduces some usage niceties for consumers of `SSHConfig`, + specifically around the issue of variable type conversions: normal value + access yields strings, but there are now methods such as `as_bool` and + `as_int` that yield casted values instead. + + For example, given the following ``ssh_config`` file snippet:: + + Host foo.example.com + PasswordAuthentication no + Compression yes + ServerAliveInterval 60 + + the following code highlights how you can access the raw strings as well as + usefully Python type-casted versions (recalling that keys are all + normalized to lowercase first):: + + my_config = SSHConfig() + my_config.parse(open('~/.ssh/config')) + conf = my_config.lookup('foo.example.com') + + assert conf['passwordauthentication'] == 'no' + assert conf.as_bool('passwordauthentication') is False + assert conf['compression'] == 'yes' + assert conf.as_bool('compression') is True + assert conf['serveraliveinterval'] == '60' + assert conf.as_int('serveraliveinterval') == 60 + + .. versionadded:: 2.5 + """ + + def as_bool(self, key): + """ + Express given key's value as a boolean type. + + Typically, this is used for ``ssh_config``'s pseudo-boolean values + which are either ``"yes"`` or ``"no"``. In such cases, ``"yes"`` yields + ``True`` and any other value becomes ``False``. + + .. note:: + If (for whatever reason) the stored value is already boolean in + nature, it's simply returned. + + .. versionadded:: 2.5 + """ + val = self[key] + if isinstance(val, bool): + return val + return val.lower() == "yes" + + def as_int(self, key): + """ + Express given key's value as an integer, if possible. + + This method will raise ``ValueError`` or similar if the value is not + int-appropriate, same as the builtin `int` type. + + .. 
versionadded:: 2.5 + """ + return int(self[key]) diff --git a/.venv/lib/python3.9/site-packages/paramiko/ecdsakey.py b/.venv/lib/python3.9/site-packages/paramiko/ecdsakey.py new file mode 100644 index 0000000..6fd95fa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/ecdsakey.py @@ -0,0 +1,339 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +ECDSA keys +""" + +from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.asymmetric.utils import ( + decode_dss_signature, + encode_dss_signature, +) + +from paramiko.common import four_byte +from paramiko.message import Message +from paramiko.pkey import PKey +from paramiko.ssh_exception import SSHException +from paramiko.util import deflate_long + + +class _ECDSACurve: + """ + Represents a specific ECDSA Curve (nistp256, nistp384, etc). + + Handles the generation of the key format identifier and the selection of + the proper hash function. Also grabs the proper curve from the 'ecdsa' + package. + """ + + def __init__(self, curve_class, nist_name): + self.nist_name = nist_name + self.key_length = curve_class.key_size + + # Defined in RFC 5656 6.2 + self.key_format_identifier = "ecdsa-sha2-" + self.nist_name + + # Defined in RFC 5656 6.2.1 + if self.key_length <= 256: + self.hash_object = hashes.SHA256 + elif self.key_length <= 384: + self.hash_object = hashes.SHA384 + else: + self.hash_object = hashes.SHA512 + + self.curve_class = curve_class + + +class _ECDSACurveSet: + """ + A collection to hold the ECDSA curves. Allows querying by oid and by key + format identifier. The two ways in which ECDSAKey needs to be able to look + up curves. + """ + + def __init__(self, ecdsa_curves): + self.ecdsa_curves = ecdsa_curves + + def get_key_format_identifier_list(self): + return [curve.key_format_identifier for curve in self.ecdsa_curves] + + def get_by_curve_class(self, curve_class): + for curve in self.ecdsa_curves: + if curve.curve_class == curve_class: + return curve + + def get_by_key_format_identifier(self, key_format_identifier): + for curve in self.ecdsa_curves: + if curve.key_format_identifier == key_format_identifier: + return curve + + def get_by_key_length(self, key_length): + for curve in self.ecdsa_curves: + if curve.key_length == key_length: + return curve + + +class ECDSAKey(PKey): + """ + Representation of an ECDSA key which can be used to sign and verify SSH2 + data. 
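+
+    Illustrative use (the file path is an example only)::
+
+        key = ECDSAKey.generate()  # NIST P-256 by default
+        key.write_private_key_file("/tmp/test_ecdsa")
+        reloaded = ECDSAKey(filename="/tmp/test_ecdsa")
+        assert reloaded.get_name() == "ecdsa-sha2-nistp256"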
+ """ + + _ECDSA_CURVES = _ECDSACurveSet( + [ + _ECDSACurve(ec.SECP256R1, "nistp256"), + _ECDSACurve(ec.SECP384R1, "nistp384"), + _ECDSACurve(ec.SECP521R1, "nistp521"), + ] + ) + + def __init__( + self, + msg=None, + data=None, + filename=None, + password=None, + vals=None, + file_obj=None, + # TODO 4.0: remove; it does nothing since porting to cryptography.io + validate_point=True, + ): + self.verifying_key = None + self.signing_key = None + self.public_blob = None + if file_obj is not None: + self._from_private_key(file_obj, password) + return + if filename is not None: + self._from_private_key_file(filename, password) + return + if (msg is None) and (data is not None): + msg = Message(data) + if vals is not None: + self.signing_key, self.verifying_key = vals + c_class = self.signing_key.curve.__class__ + self.ecdsa_curve = self._ECDSA_CURVES.get_by_curve_class(c_class) + else: + # Must set ecdsa_curve first; subroutines called herein may need to + # spit out our get_name(), which relies on this. + key_type = msg.get_text() + # But this also means we need to hand it a real key/curve + # identifier, so strip out any cert business. (NOTE: could push + # that into _ECDSACurveSet.get_by_key_format_identifier(), but it + # feels more correct to do it here?) + suffix = "-cert-v01@openssh.com" + if key_type.endswith(suffix): + key_type = key_type[: -len(suffix)] + self.ecdsa_curve = self._ECDSA_CURVES.get_by_key_format_identifier( + key_type + ) + key_types = self._ECDSA_CURVES.get_key_format_identifier_list() + cert_types = [ + "{}-cert-v01@openssh.com".format(x) for x in key_types + ] + self._check_type_and_load_cert( + msg=msg, key_type=key_types, cert_type=cert_types + ) + curvename = msg.get_text() + if curvename != self.ecdsa_curve.nist_name: + raise SSHException( + "Can't handle curve of type {}".format(curvename) + ) + + pointinfo = msg.get_binary() + try: + key = ec.EllipticCurvePublicKey.from_encoded_point( + self.ecdsa_curve.curve_class(), pointinfo + ) + self.verifying_key = key + except ValueError: + raise SSHException("Invalid public key") + + @classmethod + def identifiers(cls): + return cls._ECDSA_CURVES.get_key_format_identifier_list() + + # TODO 4.0: deprecate/remove + @classmethod + def supported_key_format_identifiers(cls): + return cls.identifiers() + + def asbytes(self): + key = self.verifying_key + m = Message() + m.add_string(self.ecdsa_curve.key_format_identifier) + m.add_string(self.ecdsa_curve.nist_name) + + numbers = key.public_numbers() + + key_size_bytes = (key.curve.key_size + 7) // 8 + + x_bytes = deflate_long(numbers.x, add_sign_padding=False) + x_bytes = b"\x00" * (key_size_bytes - len(x_bytes)) + x_bytes + + y_bytes = deflate_long(numbers.y, add_sign_padding=False) + y_bytes = b"\x00" * (key_size_bytes - len(y_bytes)) + y_bytes + + point_str = four_byte + x_bytes + y_bytes + m.add_string(point_str) + return m.asbytes() + + def __str__(self): + return self.asbytes() + + @property + def _fields(self): + return ( + self.get_name(), + self.verifying_key.public_numbers().x, + self.verifying_key.public_numbers().y, + ) + + def get_name(self): + return self.ecdsa_curve.key_format_identifier + + def get_bits(self): + return self.ecdsa_curve.key_length + + def can_sign(self): + return self.signing_key is not None + + def sign_ssh_data(self, data, algorithm=None): + ecdsa = ec.ECDSA(self.ecdsa_curve.hash_object()) + sig = self.signing_key.sign(data, ecdsa) + r, s = decode_dss_signature(sig) + + m = Message() + m.add_string(self.ecdsa_curve.key_format_identifier) + 
m.add_string(self._sigencode(r, s)) + return m + + def verify_ssh_sig(self, data, msg): + if msg.get_text() != self.ecdsa_curve.key_format_identifier: + return False + sig = msg.get_binary() + sigR, sigS = self._sigdecode(sig) + signature = encode_dss_signature(sigR, sigS) + + try: + self.verifying_key.verify( + signature, data, ec.ECDSA(self.ecdsa_curve.hash_object()) + ) + except InvalidSignature: + return False + else: + return True + + def write_private_key_file(self, filename, password=None): + self._write_private_key_file( + filename, + self.signing_key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + def write_private_key(self, file_obj, password=None): + self._write_private_key( + file_obj, + self.signing_key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + @classmethod + def generate(cls, curve=ec.SECP256R1(), progress_func=None, bits=None): + """ + Generate a new private ECDSA key. This factory function can be used to + generate a new host key or authentication key. + + :param progress_func: Not used for this type of key. + :returns: A new private key (`.ECDSAKey`) object + """ + if bits is not None: + curve = cls._ECDSA_CURVES.get_by_key_length(bits) + if curve is None: + raise ValueError("Unsupported key length: {:d}".format(bits)) + curve = curve.curve_class() + + private_key = ec.generate_private_key(curve, backend=default_backend()) + return ECDSAKey(vals=(private_key, private_key.public_key())) + + # ...internals... + + def _from_private_key_file(self, filename, password): + data = self._read_private_key_file("EC", filename, password) + self._decode_key(data) + + def _from_private_key(self, file_obj, password): + data = self._read_private_key("EC", file_obj, password) + self._decode_key(data) + + def _decode_key(self, data): + pkformat, data = data + if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL: + try: + key = serialization.load_der_private_key( + data, password=None, backend=default_backend() + ) + except ( + ValueError, + AssertionError, + TypeError, + UnsupportedAlgorithm, + ) as e: + raise SSHException(str(e)) + elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH: + try: + msg = Message(data) + curve_name = msg.get_text() + verkey = msg.get_binary() # noqa: F841 + sigkey = msg.get_mpint() + name = "ecdsa-sha2-" + curve_name + curve = self._ECDSA_CURVES.get_by_key_format_identifier(name) + if not curve: + raise SSHException("Invalid key curve identifier") + key = ec.derive_private_key( + sigkey, curve.curve_class(), default_backend() + ) + except Exception as e: + # PKey._read_private_key_openssh() should check or return + # keytype - parsing could fail for any reason due to wrong type + raise SSHException(str(e)) + else: + self._got_bad_key_format_id(pkformat) + + self.signing_key = key + self.verifying_key = key.public_key() + curve_class = key.curve.__class__ + self.ecdsa_curve = self._ECDSA_CURVES.get_by_curve_class(curve_class) + + def _sigencode(self, r, s): + msg = Message() + msg.add_mpint(r) + msg.add_mpint(s) + return msg.asbytes() + + def _sigdecode(self, sig): + msg = Message(sig) + r = msg.get_mpint() + s = msg.get_mpint() + return r, s diff --git a/.venv/lib/python3.9/site-packages/paramiko/ed25519key.py b/.venv/lib/python3.9/site-packages/paramiko/ed25519key.py new file mode 100644 index 0000000..e5e81ac --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/ed25519key.py @@ -0,0 +1,212 @@ +# This file is part of paramiko. 
+# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import bcrypt + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher + +import nacl.signing + +from paramiko.message import Message +from paramiko.pkey import PKey, OPENSSH_AUTH_MAGIC, _unpad_openssh +from paramiko.util import b +from paramiko.ssh_exception import SSHException, PasswordRequiredException + + +class Ed25519Key(PKey): + """ + Representation of an `Ed25519 `_ key. + + .. note:: + Ed25519 key support was added to OpenSSH in version 6.5. + + .. versionadded:: 2.2 + .. versionchanged:: 2.3 + Added a ``file_obj`` parameter to match other key classes. + """ + + name = "ssh-ed25519" + + def __init__( + self, msg=None, data=None, filename=None, password=None, file_obj=None + ): + self.public_blob = None + verifying_key = signing_key = None + if msg is None and data is not None: + msg = Message(data) + if msg is not None: + self._check_type_and_load_cert( + msg=msg, + key_type=self.name, + cert_type="ssh-ed25519-cert-v01@openssh.com", + ) + verifying_key = nacl.signing.VerifyKey(msg.get_binary()) + elif filename is not None: + with open(filename, "r") as f: + pkformat, data = self._read_private_key("OPENSSH", f) + elif file_obj is not None: + pkformat, data = self._read_private_key("OPENSSH", file_obj) + + if filename or file_obj: + signing_key = self._parse_signing_key_data(data, password) + + if signing_key is None and verifying_key is None: + raise ValueError("need a key") + + self._signing_key = signing_key + self._verifying_key = verifying_key + + def _parse_signing_key_data(self, data, password): + from paramiko.transport import Transport + + # We may eventually want this to be usable for other key types, as + # OpenSSH moves to it, but for now this is just for Ed25519 keys. + # This format is described here: + # https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key + # The description isn't totally complete, and I had to refer to the + # source for a full implementation. 
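+        # Rough layout (as consumed below): AUTH_MAGIC, ciphername, kdfname,
+        # kdfoptions, number of keys, the public key blob(s), then a (possibly
+        # encrypted) private section that repeats each public key and carries
+        # the 64-byte seed+public key data plus a comment string.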
+ message = Message(data) + if message.get_bytes(len(OPENSSH_AUTH_MAGIC)) != OPENSSH_AUTH_MAGIC: + raise SSHException("Invalid key") + + ciphername = message.get_text() + kdfname = message.get_text() + kdfoptions = message.get_binary() + num_keys = message.get_int() + + if kdfname == "none": + # kdfname of "none" must have an empty kdfoptions, the ciphername + # must be "none" + if kdfoptions or ciphername != "none": + raise SSHException("Invalid key") + elif kdfname == "bcrypt": + if not password: + raise PasswordRequiredException( + "Private key file is encrypted" + ) + kdf = Message(kdfoptions) + bcrypt_salt = kdf.get_binary() + bcrypt_rounds = kdf.get_int() + else: + raise SSHException("Invalid key") + + if ciphername != "none" and ciphername not in Transport._cipher_info: + raise SSHException("Invalid key") + + public_keys = [] + for _ in range(num_keys): + pubkey = Message(message.get_binary()) + if pubkey.get_text() != self.name: + raise SSHException("Invalid key") + public_keys.append(pubkey.get_binary()) + + private_ciphertext = message.get_binary() + if ciphername == "none": + private_data = private_ciphertext + else: + cipher = Transport._cipher_info[ciphername] + key = bcrypt.kdf( + password=b(password), + salt=bcrypt_salt, + desired_key_bytes=cipher["key-size"] + cipher["block-size"], + rounds=bcrypt_rounds, + # We can't control how many rounds are on disk, so no sense + # warning about it. + ignore_few_rounds=True, + ) + decryptor = Cipher( + cipher["class"](key[: cipher["key-size"]]), + cipher["mode"](key[cipher["key-size"] :]), + backend=default_backend(), + ).decryptor() + private_data = ( + decryptor.update(private_ciphertext) + decryptor.finalize() + ) + + message = Message(_unpad_openssh(private_data)) + if message.get_int() != message.get_int(): + raise SSHException("Invalid key") + + signing_keys = [] + for i in range(num_keys): + if message.get_text() != self.name: + raise SSHException("Invalid key") + # A copy of the public key, again, ignore. + public = message.get_binary() + key_data = message.get_binary() + # The second half of the key data is yet another copy of the public + # key... + signing_key = nacl.signing.SigningKey(key_data[:32]) + # Verify that all the public keys are the same... + assert ( + signing_key.verify_key.encode() + == public + == public_keys[i] + == key_data[32:] + ) + signing_keys.append(signing_key) + # Comment, ignore. 
+ message.get_binary() + + if len(signing_keys) != 1: + raise SSHException("Invalid key") + return signing_keys[0] + + def asbytes(self): + if self.can_sign(): + v = self._signing_key.verify_key + else: + v = self._verifying_key + m = Message() + m.add_string(self.name) + m.add_string(v.encode()) + return m.asbytes() + + @property + def _fields(self): + if self.can_sign(): + v = self._signing_key.verify_key + else: + v = self._verifying_key + return (self.get_name(), v) + + # TODO 4.0: remove + def get_name(self): + return self.name + + def get_bits(self): + return 256 + + def can_sign(self): + return self._signing_key is not None + + def sign_ssh_data(self, data, algorithm=None): + m = Message() + m.add_string(self.name) + m.add_string(self._signing_key.sign(data).signature) + return m + + def verify_ssh_sig(self, data, msg): + if msg.get_text() != self.name: + return False + + try: + self._verifying_key.verify(data, msg.get_binary()) + except nacl.exceptions.BadSignatureError: + return False + else: + return True diff --git a/.venv/lib/python3.9/site-packages/paramiko/file.py b/.venv/lib/python3.9/site-packages/paramiko/file.py new file mode 100644 index 0000000..a36abb9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/file.py @@ -0,0 +1,528 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +from io import BytesIO + +from paramiko.common import ( + linefeed_byte_value, + crlf, + cr_byte, + linefeed_byte, + cr_byte_value, +) + +from paramiko.util import ClosingContextManager, u + + +class BufferedFile(ClosingContextManager): + """ + Reusable base class to implement Python-style file buffering around a + simpler stream. + """ + + _DEFAULT_BUFSIZE = 8192 + + SEEK_SET = 0 + SEEK_CUR = 1 + SEEK_END = 2 + + FLAG_READ = 0x1 + FLAG_WRITE = 0x2 + FLAG_APPEND = 0x4 + FLAG_BINARY = 0x10 + FLAG_BUFFERED = 0x20 + FLAG_LINE_BUFFERED = 0x40 + FLAG_UNIVERSAL_NEWLINE = 0x80 + + def __init__(self): + self.newlines = None + self._flags = 0 + self._bufsize = self._DEFAULT_BUFSIZE + self._wbuffer = BytesIO() + self._rbuffer = bytes() + self._at_trailing_cr = False + self._closed = False + # pos - position within the file, according to the user + # realpos - position according the OS + # (these may be different because we buffer for line reading) + self._pos = self._realpos = 0 + # size only matters for seekable files + self._size = 0 + + def __del__(self): + self.close() + + def __iter__(self): + """ + Returns an iterator that can be used to iterate over the lines in this + file. This iterator happens to return the file itself, since a file is + its own iterator. + + :raises: ``ValueError`` -- if the file is closed. + """ + if self._closed: + raise ValueError("I/O operation on closed file") + return self + + def close(self): + """ + Close the file. 
Future read and write operations will fail. + """ + self.flush() + self._closed = True + + def flush(self): + """ + Write out any data in the write buffer. This may do nothing if write + buffering is not turned on. + """ + self._write_all(self._wbuffer.getvalue()) + self._wbuffer = BytesIO() + return + + def __next__(self): + """ + Returns the next line from the input, or raises ``StopIteration`` + when EOF is hit. Unlike python file objects, it's okay to mix + calls to `.next` and `.readline`. + + :raises: ``StopIteration`` -- when the end of the file is reached. + + :returns: + a line (`str`, or `bytes` if the file was opened in binary mode) + read from the file. + """ + line = self.readline() + if not line: + raise StopIteration + return line + + def readable(self): + """ + Check if the file can be read from. + + :returns: + `True` if the file can be read from. If `False`, `read` will raise + an exception. + """ + return (self._flags & self.FLAG_READ) == self.FLAG_READ + + def writable(self): + """ + Check if the file can be written to. + + :returns: + `True` if the file can be written to. If `False`, `write` will + raise an exception. + """ + return (self._flags & self.FLAG_WRITE) == self.FLAG_WRITE + + def seekable(self): + """ + Check if the file supports random access. + + :returns: + `True` if the file supports random access. If `False`, `seek` will + raise an exception. + """ + return False + + def readinto(self, buff): + """ + Read up to ``len(buff)`` bytes into ``bytearray`` *buff* and return the + number of bytes read. + + :returns: + The number of bytes read. + """ + data = self.read(len(buff)) + buff[: len(data)] = data + return len(data) + + def read(self, size=None): + """ + Read at most ``size`` bytes from the file (less if we hit the end of + the file first). If the ``size`` argument is negative or omitted, + read all the remaining data in the file. + + .. note:: + ``'b'`` mode flag is ignored (``self.FLAG_BINARY`` in + ``self._flags``), because SSH treats all files as binary, since we + have no idea what encoding the file is in, or even if the file is + text data. + + :param int size: maximum number of bytes to read + :returns: + data read from the file (as bytes), or an empty string if EOF was + encountered immediately + """ + if self._closed: + raise IOError("File is closed") + if not (self._flags & self.FLAG_READ): + raise IOError("File is not open for reading") + if (size is None) or (size < 0): + # go for broke + result = bytearray(self._rbuffer) + self._rbuffer = bytes() + self._pos += len(result) + while True: + try: + new_data = self._read(self._DEFAULT_BUFSIZE) + except EOFError: + new_data = None + if (new_data is None) or (len(new_data) == 0): + break + result.extend(new_data) + self._realpos += len(new_data) + self._pos += len(new_data) + return bytes(result) + if size <= len(self._rbuffer): + result = self._rbuffer[:size] + self._rbuffer = self._rbuffer[size:] + self._pos += len(result) + return result + while len(self._rbuffer) < size: + read_size = size - len(self._rbuffer) + if self._flags & self.FLAG_BUFFERED: + read_size = max(self._bufsize, read_size) + try: + new_data = self._read(read_size) + except EOFError: + new_data = None + if (new_data is None) or (len(new_data) == 0): + break + self._rbuffer += new_data + self._realpos += len(new_data) + result = self._rbuffer[:size] + self._rbuffer = self._rbuffer[size:] + self._pos += len(result) + return result + + def readline(self, size=None): + """ + Read one entire line from the file. 
A trailing newline character is + kept in the string (but may be absent when a file ends with an + incomplete line). If the size argument is present and non-negative, it + is a maximum byte count (including the trailing newline) and an + incomplete line may be returned. An empty string is returned only when + EOF is encountered immediately. + + .. note:: + Unlike stdio's ``fgets``, the returned string contains null + characters (``'\\0'``) if they occurred in the input. + + :param int size: maximum length of returned string. + :returns: + next line of the file, or an empty string if the end of the + file has been reached. + + If the file was opened in binary (``'b'``) mode: bytes are returned + Else: the encoding of the file is assumed to be UTF-8 and character + strings (`str`) are returned + """ + # it's almost silly how complex this function is. + if self._closed: + raise IOError("File is closed") + if not (self._flags & self.FLAG_READ): + raise IOError("File not open for reading") + line = self._rbuffer + truncated = False + while True: + if ( + self._at_trailing_cr + and self._flags & self.FLAG_UNIVERSAL_NEWLINE + and len(line) > 0 + ): + # edge case: the newline may be '\r\n' and we may have read + # only the first '\r' last time. + if line[0] == linefeed_byte_value: + line = line[1:] + self._record_newline(crlf) + else: + self._record_newline(cr_byte) + self._at_trailing_cr = False + # check size before looking for a linefeed, in case we already have + # enough. + if (size is not None) and (size >= 0): + if len(line) >= size: + # truncate line + self._rbuffer = line[size:] + line = line[:size] + truncated = True + break + n = size - len(line) + else: + n = self._bufsize + if linefeed_byte in line or ( + self._flags & self.FLAG_UNIVERSAL_NEWLINE and cr_byte in line + ): + break + try: + new_data = self._read(n) + except EOFError: + new_data = None + if (new_data is None) or (len(new_data) == 0): + self._rbuffer = bytes() + self._pos += len(line) + return line if self._flags & self.FLAG_BINARY else u(line) + line += new_data + self._realpos += len(new_data) + # find the newline + pos = line.find(linefeed_byte) + if self._flags & self.FLAG_UNIVERSAL_NEWLINE: + rpos = line.find(cr_byte) + if (rpos >= 0) and (rpos < pos or pos < 0): + pos = rpos + if pos == -1: + # we couldn't find a newline in the truncated string, return it + self._pos += len(line) + return line if self._flags & self.FLAG_BINARY else u(line) + xpos = pos + 1 + if ( + line[pos] == cr_byte_value + and xpos < len(line) + and line[xpos] == linefeed_byte_value + ): + xpos += 1 + # if the string was truncated, _rbuffer needs to have the string after + # the newline character plus the truncated part of the line we stored + # earlier in _rbuffer + if truncated: + self._rbuffer = line[xpos:] + self._rbuffer + else: + self._rbuffer = line[xpos:] + + lf = line[pos:xpos] + line = line[:pos] + linefeed_byte + if (len(self._rbuffer) == 0) and (lf == cr_byte): + # we could read the line up to a '\r' and there could still be a + # '\n' following that we read next time. note that and eat it. + self._at_trailing_cr = True + else: + self._record_newline(lf) + self._pos += len(line) + return line if self._flags & self.FLAG_BINARY else u(line) + + def readlines(self, sizehint=None): + """ + Read all remaining lines using `readline` and return them as a list. 
+ If the optional ``sizehint`` argument is present, instead of reading up + to EOF, whole lines totalling approximately sizehint bytes (possibly + after rounding up to an internal buffer size) are read. + + :param int sizehint: desired maximum number of bytes to read. + :returns: list of lines read from the file. + """ + lines = [] + byte_count = 0 + while True: + line = self.readline() + if len(line) == 0: + break + lines.append(line) + byte_count += len(line) + if (sizehint is not None) and (byte_count >= sizehint): + break + return lines + + def seek(self, offset, whence=0): + """ + Set the file's current position, like stdio's ``fseek``. Not all file + objects support seeking. + + .. note:: + If a file is opened in append mode (``'a'`` or ``'a+'``), any seek + operations will be undone at the next write (as the file position + will move back to the end of the file). + + :param int offset: + position to move to within the file, relative to ``whence``. + :param int whence: + type of movement: 0 = absolute; 1 = relative to the current + position; 2 = relative to the end of the file. + + :raises: ``IOError`` -- if the file doesn't support random access. + """ + raise IOError("File does not support seeking.") + + def tell(self): + """ + Return the file's current position. This may not be accurate or + useful if the underlying file doesn't support random access, or was + opened in append mode. + + :returns: file position (`number ` of bytes). + """ + return self._pos + + def write(self, data): + """ + Write data to the file. If write buffering is on (``bufsize`` was + specified and non-zero), some or all of the data may not actually be + written yet. (Use `flush` or `close` to force buffered data to be + written out.) + + :param data: ``str``/``bytes`` data to write + """ + if isinstance(data, str): + # Accept text and encode as utf-8 for compatibility only. + data = data.encode("utf-8") + if self._closed: + raise IOError("File is closed") + if not (self._flags & self.FLAG_WRITE): + raise IOError("File not open for writing") + if not (self._flags & self.FLAG_BUFFERED): + self._write_all(data) + return + self._wbuffer.write(data) + if self._flags & self.FLAG_LINE_BUFFERED: + # only scan the new data for linefeed, to avoid wasting time. + last_newline_pos = data.rfind(linefeed_byte) + if last_newline_pos >= 0: + wbuf = self._wbuffer.getvalue() + last_newline_pos += len(wbuf) - len(data) + self._write_all(wbuf[: last_newline_pos + 1]) + self._wbuffer = BytesIO() + self._wbuffer.write(wbuf[last_newline_pos + 1 :]) + return + # even if we're line buffering, if the buffer has grown past the + # buffer size, force a flush. + if self._wbuffer.tell() >= self._bufsize: + self.flush() + return + + def writelines(self, sequence): + """ + Write a sequence of strings to the file. The sequence can be any + iterable object producing strings, typically a list of strings. (The + name is intended to match `readlines`; `writelines` does not add line + separators.) + + :param sequence: an iterable sequence of strings. + """ + for line in sequence: + self.write(line) + return + + def xreadlines(self): + """ + Identical to ``iter(f)``. This is a deprecated file interface that + predates Python iterator support. + """ + return self + + @property + def closed(self): + return self._closed + + # ...overrides... + + def _read(self, size): + """ + (subclass override) + Read data from the stream. Return ``None`` or raise ``EOFError`` to + indicate EOF. 
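+
+        A minimal sketch of an override (``self._sock`` is a hypothetical
+        transport attribute)::
+
+            def _read(self, size):
+                data = self._sock.recv(size)
+                if not data:
+                    raise EOFError()
+                return data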
+ """ + raise EOFError() + + def _write(self, data): + """ + (subclass override) + Write data into the stream. + """ + raise IOError("write not implemented") + + def _get_size(self): + """ + (subclass override) + Return the size of the file. This is called from within `_set_mode` + if the file is opened in append mode, so the file position can be + tracked and `seek` and `tell` will work correctly. If the file is + a stream that can't be randomly accessed, you don't need to override + this method, + """ + return 0 + + # ...internals... + + def _set_mode(self, mode="r", bufsize=-1): + """ + Subclasses call this method to initialize the BufferedFile. + """ + # set bufsize in any event, because it's used for readline(). + self._bufsize = self._DEFAULT_BUFSIZE + if bufsize < 0: + # do no buffering by default, because otherwise writes will get + # buffered in a way that will probably confuse people. + bufsize = 0 + if bufsize == 1: + # apparently, line buffering only affects writes. reads are only + # buffered if you call readline (directly or indirectly: iterating + # over a file will indirectly call readline). + self._flags |= self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED + elif bufsize > 1: + self._bufsize = bufsize + self._flags |= self.FLAG_BUFFERED + self._flags &= ~self.FLAG_LINE_BUFFERED + elif bufsize == 0: + # unbuffered + self._flags &= ~(self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED) + + if ("r" in mode) or ("+" in mode): + self._flags |= self.FLAG_READ + if ("w" in mode) or ("+" in mode): + self._flags |= self.FLAG_WRITE + if "a" in mode: + self._flags |= self.FLAG_WRITE | self.FLAG_APPEND + self._size = self._get_size() + self._pos = self._realpos = self._size + if "b" in mode: + self._flags |= self.FLAG_BINARY + if "U" in mode: + self._flags |= self.FLAG_UNIVERSAL_NEWLINE + # built-in file objects have this attribute to store which kinds of + # line terminations they've seen: + # + self.newlines = None + + def _write_all(self, raw_data): + # the underlying stream may be something that does partial writes (like + # a socket). + data = memoryview(raw_data) + while len(data) > 0: + count = self._write(data) + data = data[count:] + if self._flags & self.FLAG_APPEND: + self._size += count + self._pos = self._realpos = self._size + else: + self._pos += count + self._realpos += count + return None + + def _record_newline(self, newline): + # silliness about tracking what kinds of newlines we've seen. + # i don't understand why it can be None, a string, or a tuple, instead + # of just always being a tuple, but we'll emulate that behavior anyway. + if not (self._flags & self.FLAG_UNIVERSAL_NEWLINE): + return + if self.newlines is None: + self.newlines = newline + elif self.newlines != newline and isinstance(self.newlines, bytes): + self.newlines = (self.newlines, newline) + elif newline not in self.newlines: + self.newlines += (newline,) diff --git a/.venv/lib/python3.9/site-packages/paramiko/hostkeys.py b/.venv/lib/python3.9/site-packages/paramiko/hostkeys.py new file mode 100644 index 0000000..0bcf6c3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/hostkeys.py @@ -0,0 +1,384 @@ +# Copyright (C) 2006-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. 
+# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +from base64 import encodebytes, decodebytes +import binascii +import os +import re + +from collections.abc import MutableMapping +from hashlib import sha1 +from hmac import HMAC + + +from paramiko.pkey import PKey, UnknownKeyType +from paramiko.util import get_logger, constant_time_bytes_eq, b, u +from paramiko.ssh_exception import SSHException + + +class HostKeys(MutableMapping): + """ + Representation of an OpenSSH-style "known hosts" file. Host keys can be + read from one or more files, and then individual hosts can be looked up to + verify server keys during SSH negotiation. + + A `.HostKeys` object can be treated like a dict; any dict lookup is + equivalent to calling `lookup`. + + .. versionadded:: 1.5.3 + """ + + def __init__(self, filename=None): + """ + Create a new HostKeys object, optionally loading keys from an OpenSSH + style host-key file. + + :param str filename: filename to load host keys from, or ``None`` + """ + # emulate a dict of { hostname: { keytype: PKey } } + self._entries = [] + if filename is not None: + self.load(filename) + + def add(self, hostname, keytype, key): + """ + Add a host key entry to the table. Any existing entry for a + ``(hostname, keytype)`` pair will be replaced. + + :param str hostname: the hostname (or IP) to add + :param str keytype: key type (in ``"ssh-"`` format) + :param .PKey key: the key to add + """ + for e in self._entries: + if (hostname in e.hostnames) and (e.key.get_name() == keytype): + e.key = key + return + self._entries.append(HostKeyEntry([hostname], key)) + + def load(self, filename): + """ + Read a file of known SSH host keys, in the format used by OpenSSH. + This type of file unfortunately doesn't exist on Windows, but on + posix, it will usually be stored in + ``os.path.expanduser("~/.ssh/known_hosts")``. + + If this method is called multiple times, the host keys are merged, + not cleared. So multiple calls to `load` will just call `add`, + replacing any existing entries and adding new ones. + + :param str filename: name of the file to read host keys from + + :raises: ``IOError`` -- if there was an error reading the file + """ + with open(filename, "r") as f: + for lineno, line in enumerate(f, 1): + line = line.strip() + if (len(line) == 0) or (line[0] == "#"): + continue + try: + entry = HostKeyEntry.from_line(line, lineno) + except SSHException: + continue + if entry is not None: + _hostnames = entry.hostnames + for h in _hostnames: + if self.check(h, entry.key): + entry.hostnames.remove(h) + if len(entry.hostnames): + self._entries.append(entry) + + def save(self, filename): + """ + Save host keys into a file, in the format used by OpenSSH. The order + of keys in the file will be preserved when possible (if these keys were + loaded from a file originally). The single exception is that combined + lines will be split into individual key lines, which is arguably a bug. + + :param str filename: name of the file to write + + :raises: ``IOError`` -- if there was an error writing the file + + .. 
versionadded:: 1.6.1 + """ + with open(filename, "w") as f: + for e in self._entries: + line = e.to_line() + if line: + f.write(line) + + def lookup(self, hostname): + """ + Find a hostkey entry for a given hostname or IP. If no entry is found, + ``None`` is returned. Otherwise a dictionary of keytype to key is + returned. + + :param str hostname: the hostname (or IP) to lookup + :return: dict of `str` -> `.PKey` keys associated with this host + (or ``None``) + """ + + class SubDict(MutableMapping): + def __init__(self, hostname, entries, hostkeys): + self._hostname = hostname + self._entries = entries + self._hostkeys = hostkeys + + def __iter__(self): + for k in self.keys(): + yield k + + def __len__(self): + return len(self.keys()) + + def __delitem__(self, key): + for e in list(self._entries): + if e.key.get_name() == key: + self._entries.remove(e) + break + else: + raise KeyError(key) + + def __getitem__(self, key): + for e in self._entries: + if e.key.get_name() == key: + return e.key + raise KeyError(key) + + def __setitem__(self, key, val): + for e in self._entries: + if e.key is None: + continue + if e.key.get_name() == key: + # replace + e.key = val + break + else: + # add a new one + e = HostKeyEntry([hostname], val) + self._entries.append(e) + self._hostkeys._entries.append(e) + + def keys(self): + return [ + e.key.get_name() + for e in self._entries + if e.key is not None + ] + + entries = [] + for e in self._entries: + if self._hostname_matches(hostname, e): + entries.append(e) + if len(entries) == 0: + return None + return SubDict(hostname, entries, self) + + def _hostname_matches(self, hostname, entry): + """ + Tests whether ``hostname`` string matches given SubDict ``entry``. + + :returns bool: + """ + for h in entry.hostnames: + if ( + h == hostname + or h.startswith("|1|") + and not hostname.startswith("|1|") + and constant_time_bytes_eq(self.hash_host(hostname, h), h) + ): + return True + return False + + def check(self, hostname, key): + """ + Return True if the given key is associated with the given hostname + in this dictionary. + + :param str hostname: hostname (or IP) of the SSH server + :param .PKey key: the key to check + :return: + ``True`` if the key is associated with the hostname; else ``False`` + """ + k = self.lookup(hostname) + if k is None: + return False + host_key = k.get(key.get_name(), None) + if host_key is None: + return False + return host_key.asbytes() == key.asbytes() + + def clear(self): + """ + Remove all host keys from the dictionary. + """ + self._entries = [] + + def __iter__(self): + for k in self.keys(): + yield k + + def __len__(self): + return len(self.keys()) + + def __getitem__(self, key): + ret = self.lookup(key) + if ret is None: + raise KeyError(key) + return ret + + def __delitem__(self, key): + index = None + for i, entry in enumerate(self._entries): + if self._hostname_matches(key, entry): + index = i + break + if index is None: + raise KeyError(key) + self._entries.pop(index) + + def __setitem__(self, hostname, entry): + # don't use this please. 
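+        # (``entry`` is expected to be a mapping of key-type string -> PKey,
+        # like the dicts returned by lookup(); an empty mapping records the
+        # hostname with no key.)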
+ if len(entry) == 0: + self._entries.append(HostKeyEntry([hostname], None)) + return + for key_type in entry.keys(): + found = False + for e in self._entries: + if (hostname in e.hostnames) and e.key.get_name() == key_type: + # replace + e.key = entry[key_type] + found = True + if not found: + self._entries.append(HostKeyEntry([hostname], entry[key_type])) + + def keys(self): + ret = [] + for e in self._entries: + for h in e.hostnames: + if h not in ret: + ret.append(h) + return ret + + def values(self): + ret = [] + for k in self.keys(): + ret.append(self.lookup(k)) + return ret + + @staticmethod + def hash_host(hostname, salt=None): + """ + Return a "hashed" form of the hostname, as used by OpenSSH when storing + hashed hostnames in the known_hosts file. + + :param str hostname: the hostname to hash + :param str salt: optional salt to use when hashing + (must be 20 bytes long) + :return: the hashed hostname as a `str` + """ + if salt is None: + salt = os.urandom(sha1().digest_size) + else: + if salt.startswith("|1|"): + salt = salt.split("|")[2] + salt = decodebytes(b(salt)) + assert len(salt) == sha1().digest_size + hmac = HMAC(salt, b(hostname), sha1).digest() + hostkey = "|1|{}|{}".format(u(encodebytes(salt)), u(encodebytes(hmac))) + return hostkey.replace("\n", "") + + +class InvalidHostKey(Exception): + def __init__(self, line, exc): + self.line = line + self.exc = exc + self.args = (line, exc) + + +class HostKeyEntry: + """ + Representation of a line in an OpenSSH-style "known hosts" file. + """ + + def __init__(self, hostnames=None, key=None): + self.valid = (hostnames is not None) and (key is not None) + self.hostnames = hostnames + self.key = key + + @classmethod + def from_line(cls, line, lineno=None): + """ + Parses the given line of text to find the names for the host, + the type of key, and the key data. The line is expected to be in the + format used by the OpenSSH known_hosts file. Fields are separated by a + single space or tab. + + Lines are expected to not have leading or trailing whitespace. + We don't bother to check for comments or empty lines. All of + that should be taken care of before sending the line to us. + + :param str line: a line from an OpenSSH known_hosts file + """ + log = get_logger("paramiko.hostkeys") + fields = re.split(" |\t", line) + if len(fields) < 3: + # Bad number of fields + msg = "Not enough fields found in known_hosts in line {} ({!r})" + log.info(msg.format(lineno, line)) + return None + fields = fields[:3] + + names, key_type, key = fields + names = names.split(",") + + # Decide what kind of key we're looking at and create an object + # to hold it accordingly. + try: + # TODO: this grew organically and doesn't seem /wrong/ per se (file + # read -> unicode str -> bytes for base64 decode -> decoded bytes); + # but in Python 3 forever land, can we simply use + # `base64.b64decode(str-from-file)` here? + key_bytes = decodebytes(b(key)) + except binascii.Error as e: + raise InvalidHostKey(line, e) + + try: + return cls(names, PKey.from_type_string(key_type, key_bytes)) + except UnknownKeyType: + # TODO 4.0: consider changing HostKeys API so this just raises + # naturally and the exception is muted higher up in the stack? + log.info("Unable to handle key of type {}".format(key_type)) + return None + + def to_line(self): + """ + Returns a string in OpenSSH known_hosts file format, or None if + the object is not in a valid state. A trailing newline is + included. 
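+
+        For example (hostnames and key data are illustrative)::
+
+            server.example.com,10.0.0.5 ssh-ed25519 AAAAC3Nza...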
+ """ + if self.valid: + return "{} {} {}\n".format( + ",".join(self.hostnames), + self.key.get_name(), + self.key.get_base64(), + ) + return None + + def __repr__(self): + return "".format(self.hostnames, self.key) diff --git a/.venv/lib/python3.9/site-packages/paramiko/kex_curve25519.py b/.venv/lib/python3.9/site-packages/paramiko/kex_curve25519.py new file mode 100644 index 0000000..20c23e4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/kex_curve25519.py @@ -0,0 +1,131 @@ +import binascii +import hashlib + +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.x25519 import ( + X25519PrivateKey, + X25519PublicKey, +) + +from paramiko.message import Message +from paramiko.common import byte_chr +from paramiko.ssh_exception import SSHException + + +_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32) +c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)] + + +class KexCurve25519: + hash_algo = hashlib.sha256 + + def __init__(self, transport): + self.transport = transport + self.key = None + + @classmethod + def is_available(cls): + try: + X25519PrivateKey.generate() + except UnsupportedAlgorithm: + return False + else: + return True + + def _perform_exchange(self, peer_key): + secret = self.key.exchange(peer_key) + if constant_time.bytes_eq(secret, b"\x00" * 32): + raise SSHException( + "peer's curve25519 public value has wrong order" + ) + return secret + + def start_kex(self): + self.key = X25519PrivateKey.generate() + if self.transport.server_mode: + self.transport._expect_packet(_MSG_KEXECDH_INIT) + return + + m = Message() + m.add_byte(c_MSG_KEXECDH_INIT) + m.add_string( + self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + ) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXECDH_REPLY) + + def parse_next(self, ptype, m): + if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT): + return self._parse_kexecdh_init(m) + elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY): + return self._parse_kexecdh_reply(m) + raise SSHException( + "KexCurve25519 asked to handle packet type {:d}".format(ptype) + ) + + def _parse_kexecdh_init(self, m): + peer_key_bytes = m.get_string() + peer_key = X25519PublicKey.from_public_bytes(peer_key_bytes) + K = self._perform_exchange(peer_key) + K = int(binascii.hexlify(K), 16) + # compute exchange hash + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + server_key_bytes = self.transport.get_server_key().asbytes() + exchange_key_bytes = self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + hm.add_string(server_key_bytes) + hm.add_string(peer_key_bytes) + hm.add_string(exchange_key_bytes) + hm.add_mpint(K) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # construct reply + m = Message() + m.add_byte(c_MSG_KEXECDH_REPLY) + m.add_string(server_key_bytes) + m.add_string(exchange_key_bytes) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() + + def _parse_kexecdh_reply(self, m): + peer_host_key_bytes = m.get_string() + peer_key_bytes = m.get_string() + sig = m.get_binary() + + 
peer_key = X25519PublicKey.from_public_bytes(peer_key_bytes) + + K = self._perform_exchange(peer_key) + K = int(binascii.hexlify(K), 16) + # compute exchange hash and verify signature + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(peer_host_key_bytes) + hm.add_string( + self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + ) + hm.add_string(peer_key_bytes) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(peer_host_key_bytes, sig) + self.transport._activate_outbound() diff --git a/.venv/lib/python3.9/site-packages/paramiko/kex_ecdh_nist.py b/.venv/lib/python3.9/site-packages/paramiko/kex_ecdh_nist.py new file mode 100644 index 0000000..41fab46 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/kex_ecdh_nist.py @@ -0,0 +1,151 @@ +""" +Ephemeral Elliptic Curve Diffie-Hellman (ECDH) key exchange +RFC 5656, Section 4 +""" + +from hashlib import sha256, sha384, sha512 +from paramiko.common import byte_chr +from paramiko.message import Message +from paramiko.ssh_exception import SSHException +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives import serialization +from binascii import hexlify + +_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32) +c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)] + + +class KexNistp256: + + name = "ecdh-sha2-nistp256" + hash_algo = sha256 + curve = ec.SECP256R1() + + def __init__(self, transport): + self.transport = transport + # private key, client public and server public keys + self.P = 0 + self.Q_C = None + self.Q_S = None + + def start_kex(self): + self._generate_key_pair() + if self.transport.server_mode: + self.transport._expect_packet(_MSG_KEXECDH_INIT) + return + m = Message() + m.add_byte(c_MSG_KEXECDH_INIT) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + m.add_string( + self.Q_C.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXECDH_REPLY) + + def parse_next(self, ptype, m): + if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT): + return self._parse_kexecdh_init(m) + elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY): + return self._parse_kexecdh_reply(m) + raise SSHException( + "KexECDH asked to handle packet type {:d}".format(ptype) + ) + + def _generate_key_pair(self): + self.P = ec.generate_private_key(self.curve, default_backend()) + if self.transport.server_mode: + self.Q_S = self.P.public_key() + return + self.Q_C = self.P.public_key() + + def _parse_kexecdh_init(self, m): + Q_C_bytes = m.get_string() + self.Q_C = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, Q_C_bytes + ) + K_S = self.transport.get_server_key().asbytes() + K = self.P.exchange(ec.ECDH(), self.Q_C) + K = int(hexlify(K), 16) + # compute exchange hash + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + hm.add_string(K_S) + hm.add_string(Q_C_bytes) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + hm.add_string( + self.Q_S.public_bytes( + serialization.Encoding.X962, + 
serialization.PublicFormat.UncompressedPoint, + ) + ) + hm.add_mpint(int(K)) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # construct reply + m = Message() + m.add_byte(c_MSG_KEXECDH_REPLY) + m.add_string(K_S) + m.add_string( + self.Q_S.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() + + def _parse_kexecdh_reply(self, m): + K_S = m.get_string() + Q_S_bytes = m.get_string() + self.Q_S = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, Q_S_bytes + ) + sig = m.get_binary() + K = self.P.exchange(ec.ECDH(), self.Q_S) + K = int(hexlify(K), 16) + # compute exchange hash and verify signature + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(K_S) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + hm.add_string( + self.Q_C.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + hm.add_string(Q_S_bytes) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(K_S, sig) + self.transport._activate_outbound() + + +class KexNistp384(KexNistp256): + name = "ecdh-sha2-nistp384" + hash_algo = sha384 + curve = ec.SECP384R1() + + +class KexNistp521(KexNistp256): + name = "ecdh-sha2-nistp521" + hash_algo = sha512 + curve = ec.SECP521R1() diff --git a/.venv/lib/python3.9/site-packages/paramiko/kex_gex.py b/.venv/lib/python3.9/site-packages/paramiko/kex_gex.py new file mode 100644 index 0000000..baa0803 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/kex_gex.py @@ -0,0 +1,288 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Variant on `KexGroup1 ` where the prime "p" and +generator "g" are provided by the server. A bit more work is required on the +client side, and a **lot** more on the server side. 
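Both the NIST-curve classes above and the group-exchange class introduced here reduce to the same step: combine a local ephemeral private value with the peer's public value, and feed the resulting shared secret, as mpint K, into the exchange hash. For the NIST curves that combination is delegated to the `cryptography` package; a minimal standalone sketch of the agreement (illustrative only, not paramiko code):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import ec

    # Ephemeral NIST P-256 key pairs, one per peer (mirrors what
    # KexNistp256._generate_key_pair produces).
    client_priv = ec.generate_private_key(ec.SECP256R1(), default_backend())
    server_priv = ec.generate_private_key(ec.SECP256R1(), default_backend())

    # Each side combines its own private key with the peer's public key.
    k_client = client_priv.exchange(ec.ECDH(), server_priv.public_key())
    k_server = server_priv.exchange(ec.ECDH(), client_priv.public_key())
    assert k_client == k_server  # the raw shared secret that becomes mpint K

The handlers above then hash K together with both version strings, both KEXINIT payloads, the host key, and both public points to form the exchange hash H.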
+""" + +import os +from hashlib import sha1, sha256 + +from paramiko import util +from paramiko.common import DEBUG, byte_chr, byte_ord, byte_mask +from paramiko.message import Message +from paramiko.ssh_exception import SSHException + + +( + _MSG_KEXDH_GEX_REQUEST_OLD, + _MSG_KEXDH_GEX_GROUP, + _MSG_KEXDH_GEX_INIT, + _MSG_KEXDH_GEX_REPLY, + _MSG_KEXDH_GEX_REQUEST, +) = range(30, 35) + +( + c_MSG_KEXDH_GEX_REQUEST_OLD, + c_MSG_KEXDH_GEX_GROUP, + c_MSG_KEXDH_GEX_INIT, + c_MSG_KEXDH_GEX_REPLY, + c_MSG_KEXDH_GEX_REQUEST, +) = [byte_chr(c) for c in range(30, 35)] + + +class KexGex: + + name = "diffie-hellman-group-exchange-sha1" + min_bits = 1024 + max_bits = 8192 + preferred_bits = 2048 + hash_algo = sha1 + + def __init__(self, transport): + self.transport = transport + self.p = None + self.q = None + self.g = None + self.x = None + self.e = None + self.f = None + self.old_style = False + + def start_kex(self, _test_old_style=False): + if self.transport.server_mode: + self.transport._expect_packet( + _MSG_KEXDH_GEX_REQUEST, _MSG_KEXDH_GEX_REQUEST_OLD + ) + return + # request a bit range: we accept (min_bits) to (max_bits), but prefer + # (preferred_bits). according to the spec, we shouldn't pull the + # minimum up above 1024. + m = Message() + if _test_old_style: + # only used for unit tests: we shouldn't ever send this + m.add_byte(c_MSG_KEXDH_GEX_REQUEST_OLD) + m.add_int(self.preferred_bits) + self.old_style = True + else: + m.add_byte(c_MSG_KEXDH_GEX_REQUEST) + m.add_int(self.min_bits) + m.add_int(self.preferred_bits) + m.add_int(self.max_bits) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_GEX_GROUP) + + def parse_next(self, ptype, m): + if ptype == _MSG_KEXDH_GEX_REQUEST: + return self._parse_kexdh_gex_request(m) + elif ptype == _MSG_KEXDH_GEX_GROUP: + return self._parse_kexdh_gex_group(m) + elif ptype == _MSG_KEXDH_GEX_INIT: + return self._parse_kexdh_gex_init(m) + elif ptype == _MSG_KEXDH_GEX_REPLY: + return self._parse_kexdh_gex_reply(m) + elif ptype == _MSG_KEXDH_GEX_REQUEST_OLD: + return self._parse_kexdh_gex_request_old(m) + msg = "KexGex {} asked to handle packet type {:d}" + raise SSHException(msg.format(self.name, ptype)) + + # ...internals... + + def _generate_x(self): + # generate an "x" (1 < x < (p-1)/2). + q = (self.p - 1) // 2 + qnorm = util.deflate_long(q, 0) + qhbyte = byte_ord(qnorm[0]) + byte_count = len(qnorm) + qmask = 0xFF + while not (qhbyte & 0x80): + qhbyte <<= 1 + qmask >>= 1 + while True: + x_bytes = os.urandom(byte_count) + x_bytes = byte_mask(x_bytes[0], qmask) + x_bytes[1:] + x = util.inflate_long(x_bytes, 1) + if (x > 1) and (x < q): + break + self.x = x + + def _parse_kexdh_gex_request(self, m): + minbits = m.get_int() + preferredbits = m.get_int() + maxbits = m.get_int() + # smoosh the user's preferred size into our own limits + if preferredbits > self.max_bits: + preferredbits = self.max_bits + if preferredbits < self.min_bits: + preferredbits = self.min_bits + # fix min/max if they're inconsistent. technically, we could just pout + # and hang up, but there's no harm in giving them the benefit of the + # doubt and just picking a bitsize for them. 
+ if minbits > preferredbits: + minbits = preferredbits + if maxbits < preferredbits: + maxbits = preferredbits + # now save a copy + self.min_bits = minbits + self.preferred_bits = preferredbits + self.max_bits = maxbits + # generate prime + pack = self.transport._get_modulus_pack() + if pack is None: + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, + "Picking p ({} <= {} <= {} bits)".format( + minbits, preferredbits, maxbits + ), + ) + self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits) + m = Message() + m.add_byte(c_MSG_KEXDH_GEX_GROUP) + m.add_mpint(self.p) + m.add_mpint(self.g) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_GEX_INIT) + + def _parse_kexdh_gex_request_old(self, m): + # same as above, but without min_bits or max_bits (used by older + # clients like putty) + self.preferred_bits = m.get_int() + # smoosh the user's preferred size into our own limits + if self.preferred_bits > self.max_bits: + self.preferred_bits = self.max_bits + if self.preferred_bits < self.min_bits: + self.preferred_bits = self.min_bits + # generate prime + pack = self.transport._get_modulus_pack() + if pack is None: + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, "Picking p (~ {} bits)".format(self.preferred_bits) + ) + self.g, self.p = pack.get_modulus( + self.min_bits, self.preferred_bits, self.max_bits + ) + m = Message() + m.add_byte(c_MSG_KEXDH_GEX_GROUP) + m.add_mpint(self.p) + m.add_mpint(self.g) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_GEX_INIT) + self.old_style = True + + def _parse_kexdh_gex_group(self, m): + self.p = m.get_mpint() + self.g = m.get_mpint() + # reject if p's bit length < 1024 or > 8192 + bitlen = util.bit_length(self.p) + if (bitlen < 1024) or (bitlen > 8192): + raise SSHException( + "Server-generated gex p (don't ask) is out of range " + "({} bits)".format(bitlen) + ) + self.transport._log(DEBUG, "Got server p ({} bits)".format(bitlen)) + self._generate_x() + # now compute e = g^x mod p + self.e = pow(self.g, self.x, self.p) + m = Message() + m.add_byte(c_MSG_KEXDH_GEX_INIT) + m.add_mpint(self.e) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_GEX_REPLY) + + def _parse_kexdh_gex_init(self, m): + self.e = m.get_mpint() + if (self.e < 1) or (self.e > self.p - 1): + raise SSHException('Client kex "e" is out of range') + self._generate_x() + self.f = pow(self.g, self.x, self.p) + K = pow(self.e, self.x, self.p) + key = self.transport.get_server_key().asbytes() + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + key, + ) + if not self.old_style: + hm.add_int(self.min_bits) + hm.add_int(self.preferred_bits) + if not self.old_style: + hm.add_int(self.max_bits) + hm.add_mpint(self.p) + hm.add_mpint(self.g) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + # sign it + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # send reply + m = Message() + m.add_byte(c_MSG_KEXDH_GEX_REPLY) + m.add_string(key) + m.add_mpint(self.f) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() + 
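Why the server handler above and the client handler `_parse_kexdh_gex_reply` below agree on K is ordinary modular arithmetic: (g^x)^y and (g^y)^x are the same value mod p. A toy-sized standalone sketch (illustrative only; a real exchange uses the negotiated 1024-8192 bit modulus from the modulus pack, never a hard-coded constant like this):

    import os

    p = 2 ** 127 - 1  # small Mersenne prime, for demonstration only
    g = 2

    x = 2 + int.from_bytes(os.urandom(16), "big") % (p - 3)  # client secret
    y = 2 + int.from_bytes(os.urandom(16), "big") % (p - 3)  # server secret

    e = pow(g, x, p)  # travels in KEXDH_GEX_INIT
    f = pow(g, y, p)  # travels back in KEXDH_GEX_REPLY

    assert pow(f, x, p) == pow(e, y, p)  # both sides derive the same K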
+ def _parse_kexdh_gex_reply(self, m): + host_key = m.get_string() + self.f = m.get_mpint() + sig = m.get_string() + if (self.f < 1) or (self.f > self.p - 1): + raise SSHException('Server kex "f" is out of range') + K = pow(self.f, self.x, self.p) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + host_key, + ) + if not self.old_style: + hm.add_int(self.min_bits) + hm.add_int(self.preferred_bits) + if not self.old_style: + hm.add_int(self.max_bits) + hm.add_mpint(self.p) + hm.add_mpint(self.g) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(host_key, sig) + self.transport._activate_outbound() + + +class KexGexSHA256(KexGex): + name = "diffie-hellman-group-exchange-sha256" + hash_algo = sha256 diff --git a/.venv/lib/python3.9/site-packages/paramiko/kex_group1.py b/.venv/lib/python3.9/site-packages/paramiko/kex_group1.py new file mode 100644 index 0000000..f074256 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/kex_group1.py @@ -0,0 +1,155 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of +1024 bit key halves, using a known "p" prime and "g" generator. 
+""" + +import os +from hashlib import sha1 + +from paramiko import util +from paramiko.common import max_byte, zero_byte, byte_chr, byte_mask +from paramiko.message import Message +from paramiko.ssh_exception import SSHException + + +_MSG_KEXDH_INIT, _MSG_KEXDH_REPLY = range(30, 32) +c_MSG_KEXDH_INIT, c_MSG_KEXDH_REPLY = [byte_chr(c) for c in range(30, 32)] + +b7fffffffffffffff = byte_chr(0x7F) + max_byte * 7 +b0000000000000000 = zero_byte * 8 + + +class KexGroup1: + + # draft-ietf-secsh-transport-09.txt, page 17 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa + G = 2 + + name = "diffie-hellman-group1-sha1" + hash_algo = sha1 + + def __init__(self, transport): + self.transport = transport + self.x = 0 + self.e = 0 + self.f = 0 + + def start_kex(self): + self._generate_x() + if self.transport.server_mode: + # compute f = g^x mod p, but don't send it yet + self.f = pow(self.G, self.x, self.P) + self.transport._expect_packet(_MSG_KEXDH_INIT) + return + # compute e = g^x mod p (where g=2), and send it + self.e = pow(self.G, self.x, self.P) + m = Message() + m.add_byte(c_MSG_KEXDH_INIT) + m.add_mpint(self.e) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_REPLY) + + def parse_next(self, ptype, m): + if self.transport.server_mode and (ptype == _MSG_KEXDH_INIT): + return self._parse_kexdh_init(m) + elif not self.transport.server_mode and (ptype == _MSG_KEXDH_REPLY): + return self._parse_kexdh_reply(m) + msg = "KexGroup1 asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) + + # ...internals... + + def _generate_x(self): + # generate an "x" (1 < x < q), where q is (p-1)/2. + # p is a 128-byte (1024-bit) number, where the first 64 bits are 1. + # therefore q can be approximated as a 2^1023. we drop the subset of + # potential x where the first 63 bits are 1, because some of those + # will be larger than q (but this is a tiny tiny subset of + # potential x). 
+ while 1: + x_bytes = os.urandom(128) + x_bytes = byte_mask(x_bytes[0], 0x7F) + x_bytes[1:] + if ( + x_bytes[:8] != b7fffffffffffffff + and x_bytes[:8] != b0000000000000000 + ): + break + self.x = util.inflate_long(x_bytes) + + def _parse_kexdh_reply(self, m): + # client mode + host_key = m.get_string() + self.f = m.get_mpint() + if (self.f < 1) or (self.f > self.P - 1): + raise SSHException('Server kex "f" is out of range') + sig = m.get_binary() + K = pow(self.f, self.x, self.P) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(host_key) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(host_key, sig) + self.transport._activate_outbound() + + def _parse_kexdh_init(self, m): + # server mode + self.e = m.get_mpint() + if (self.e < 1) or (self.e > self.P - 1): + raise SSHException('Client kex "e" is out of range') + K = pow(self.e, self.x, self.P) + key = self.transport.get_server_key().asbytes() + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + hm.add_string(key) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + # sign it + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # send reply + m = Message() + m.add_byte(c_MSG_KEXDH_REPLY) + m.add_string(key) + m.add_mpint(self.f) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() diff --git a/.venv/lib/python3.9/site-packages/paramiko/kex_group14.py b/.venv/lib/python3.9/site-packages/paramiko/kex_group14.py new file mode 100644 index 0000000..8dee551 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/kex_group14.py @@ -0,0 +1,40 @@ +# Copyright (C) 2013 Torsten Landschoff +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of +2048 bit key halves, using a known "p" prime and "g" generator. 
+""" + +from paramiko.kex_group1 import KexGroup1 +from hashlib import sha1, sha256 + + +class KexGroup14(KexGroup1): + + # http://tools.ietf.org/html/rfc3526#section-3 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF # noqa + G = 2 + + name = "diffie-hellman-group14-sha1" + hash_algo = sha1 + + +class KexGroup14SHA256(KexGroup14): + name = "diffie-hellman-group14-sha256" + hash_algo = sha256 diff --git a/.venv/lib/python3.9/site-packages/paramiko/kex_group16.py b/.venv/lib/python3.9/site-packages/paramiko/kex_group16.py new file mode 100644 index 0000000..c675f87 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/kex_group16.py @@ -0,0 +1,35 @@ +# Copyright (C) 2019 Edgar Sousa +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of +4096 bit key halves, using a known "p" prime and "g" generator. 
+""" + +from paramiko.kex_group1 import KexGroup1 +from hashlib import sha512 + + +class KexGroup16SHA512(KexGroup1): + name = "diffie-hellman-group16-sha512" + # http://tools.ietf.org/html/rfc3526#section-5 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF # noqa + G = 2 + + name = "diffie-hellman-group16-sha512" + hash_algo = sha512 diff --git a/.venv/lib/python3.9/site-packages/paramiko/kex_gss.py b/.venv/lib/python3.9/site-packages/paramiko/kex_gss.py new file mode 100644 index 0000000..2a5f29e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/kex_gss.py @@ -0,0 +1,686 @@ +# Copyright (C) 2003-2007 Robey Pointer +# Copyright (C) 2013-2014 science + computing ag +# Author: Sebastian Deiss +# +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +""" +This module provides GSS-API / SSPI Key Exchange as defined in :rfc:`4462`. + +.. note:: Credential delegation is not supported in server mode. + +.. note:: + `RFC 4462 Section 2.2 + `_ says we are not + required to implement GSS-API error messages. Thus, in many methods within + this module, if an error occurs an exception will be thrown and the + connection will be terminated. + +.. seealso:: :doc:`/api/ssh_gss` + +.. 
versionadded:: 1.15 +""" + +import os +from hashlib import sha1 + +from paramiko.common import ( + DEBUG, + max_byte, + zero_byte, + byte_chr, + byte_mask, + byte_ord, +) +from paramiko import util +from paramiko.message import Message +from paramiko.ssh_exception import SSHException + + +( + MSG_KEXGSS_INIT, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_ERROR, +) = range(30, 35) +(MSG_KEXGSS_GROUPREQ, MSG_KEXGSS_GROUP) = range(40, 42) +( + c_MSG_KEXGSS_INIT, + c_MSG_KEXGSS_CONTINUE, + c_MSG_KEXGSS_COMPLETE, + c_MSG_KEXGSS_HOSTKEY, + c_MSG_KEXGSS_ERROR, +) = [byte_chr(c) for c in range(30, 35)] +(c_MSG_KEXGSS_GROUPREQ, c_MSG_KEXGSS_GROUP) = [ + byte_chr(c) for c in range(40, 42) +] + + +class KexGSSGroup1: + """ + GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange as defined in `RFC + 4462 Section 2 `_ + """ + + # draft-ietf-secsh-transport-09.txt, page 17 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa + G = 2 + b7fffffffffffffff = byte_chr(0x7F) + max_byte * 7 # noqa + b0000000000000000 = zero_byte * 8 # noqa + NAME = "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==" + + def __init__(self, transport): + self.transport = transport + self.kexgss = self.transport.kexgss_ctxt + self.gss_host = None + self.x = 0 + self.e = 0 + self.f = 0 + + def start_kex(self): + """ + Start the GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange. + """ + self._generate_x() + if self.transport.server_mode: + # compute f = g^x mod p, but don't send it yet + self.f = pow(self.G, self.x, self.P) + self.transport._expect_packet(MSG_KEXGSS_INIT) + return + # compute e = g^x mod p (where g=2), and send it + self.e = pow(self.G, self.x, self.P) + # Initialize GSS-API Key Exchange + self.gss_host = self.transport.gss_host + m = Message() + m.add_byte(c_MSG_KEXGSS_INIT) + m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host)) + m.add_mpint(self.e) + self.transport._send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_ERROR, + ) + + def parse_next(self, ptype, m): + """ + Parse the next packet. + + :param ptype: The (string) type of the incoming packet + :param `.Message` m: The packet content + """ + if self.transport.server_mode and (ptype == MSG_KEXGSS_INIT): + return self._parse_kexgss_init(m) + elif not self.transport.server_mode and (ptype == MSG_KEXGSS_HOSTKEY): + return self._parse_kexgss_hostkey(m) + elif self.transport.server_mode and (ptype == MSG_KEXGSS_CONTINUE): + return self._parse_kexgss_continue(m) + elif not self.transport.server_mode and (ptype == MSG_KEXGSS_COMPLETE): + return self._parse_kexgss_complete(m) + elif ptype == MSG_KEXGSS_ERROR: + return self._parse_kexgss_error(m) + msg = "GSS KexGroup1 asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) + + # ## internals... + + def _generate_x(self): + """ + generate an "x" (1 < x < q), where q is (p-1)/2. + p is a 128-byte (1024-bit) number, where the first 64 bits are 1. + therefore q can be approximated as a 2^1023. we drop the subset of + potential x where the first 63 bits are 1, because some of those will + be larger than q (but this is a tiny tiny subset of potential x). 
+ """ + while 1: + x_bytes = os.urandom(128) + x_bytes = byte_mask(x_bytes[0], 0x7F) + x_bytes[1:] + first = x_bytes[:8] + if first not in (self.b7fffffffffffffff, self.b0000000000000000): + break + self.x = util.inflate_long(x_bytes) + + def _parse_kexgss_hostkey(self, m): + """ + Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode). + + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message + """ + # client mode + host_key = m.get_string() + self.transport.host_key = host_key + sig = m.get_string() + self.transport._verify_key(host_key, sig) + self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE) + + def _parse_kexgss_continue(self, m): + """ + Parse the SSH2_MSG_KEXGSS_CONTINUE message. + + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE + message + """ + if not self.transport.server_mode: + srv_token = m.get_string() + m = Message() + m.add_byte(c_MSG_KEXGSS_CONTINUE) + m.add_string( + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + ) + self.transport.send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) + else: + pass + + def _parse_kexgss_complete(self, m): + """ + Parse the SSH2_MSG_KEXGSS_COMPLETE message (client mode). + + :param `.Message` m: The content of the + SSH2_MSG_KEXGSS_COMPLETE message + """ + # client mode + if self.transport.host_key is None: + self.transport.host_key = NullHostKey() + self.f = m.get_mpint() + if (self.f < 1) or (self.f > self.P - 1): + raise SSHException('Server kex "f" is out of range') + mic_token = m.get_string() + # This must be TRUE, if there is a GSS-API token in this message. + bool = m.get_boolean() + srv_token = None + if bool: + srv_token = m.get_string() + K = pow(self.f, self.x, self.P) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(self.transport.host_key.__str__()) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = sha1(str(hm)).digest() + self.transport._set_K_H(K, H) + if srv_token is not None: + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + self.kexgss.ssh_check_mic(mic_token, H) + else: + self.kexgss.ssh_check_mic(mic_token, H) + self.transport.gss_kex_used = True + self.transport._activate_outbound() + + def _parse_kexgss_init(self, m): + """ + Parse the SSH2_MSG_KEXGSS_INIT message (server mode). 
+ + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_INIT message + """ + # server mode + client_token = m.get_string() + self.e = m.get_mpint() + if (self.e < 1) or (self.e > self.P - 1): + raise SSHException('Client kex "e" is out of range') + K = pow(self.e, self.x, self.P) + self.transport.host_key = NullHostKey() + key = self.transport.host_key.__str__() + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + hm.add_string(key) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = sha1(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + srv_token = self.kexgss.ssh_accept_sec_context( + self.gss_host, client_token + ) + m = Message() + if self.kexgss._gss_srv_ctxt_status: + mic_token = self.kexgss.ssh_get_mic( + self.transport.session_id, gss_kex=True + ) + m.add_byte(c_MSG_KEXGSS_COMPLETE) + m.add_mpint(self.f) + m.add_string(mic_token) + if srv_token is not None: + m.add_boolean(True) + m.add_string(srv_token) + else: + m.add_boolean(False) + self.transport._send_message(m) + self.transport.gss_kex_used = True + self.transport._activate_outbound() + else: + m.add_byte(c_MSG_KEXGSS_CONTINUE) + m.add_string(srv_token) + self.transport._send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) + + def _parse_kexgss_error(self, m): + """ + Parse the SSH2_MSG_KEXGSS_ERROR message (client mode). + The server may send a GSS-API error message. if it does, we display + the error by throwing an exception (client mode). + + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_ERROR message + :raise SSHException: Contains GSS-API major and minor status as well as + the error message and the language tag of the + message + """ + maj_status = m.get_int() + min_status = m.get_int() + err_msg = m.get_string() + m.get_string() # we don't care about the language! 
+ raise SSHException( + """GSS-API Error: +Major Status: {} +Minor Status: {} +Error Message: {} +""".format( + maj_status, min_status, err_msg + ) + ) + + +class KexGSSGroup14(KexGSSGroup1): + """ + GSS-API / SSPI Authenticated Diffie-Hellman Group14 Key Exchange as defined + in `RFC 4462 Section 2 + `_ + """ + + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF # noqa + G = 2 + NAME = "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==" + + +class KexGSSGex: + """ + GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange as defined in + `RFC 4462 Section 2 `_ + """ + + NAME = "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==" + min_bits = 1024 + max_bits = 8192 + preferred_bits = 2048 + + def __init__(self, transport): + self.transport = transport + self.kexgss = self.transport.kexgss_ctxt + self.gss_host = None + self.p = None + self.q = None + self.g = None + self.x = None + self.e = None + self.f = None + self.old_style = False + + def start_kex(self): + """ + Start the GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange + """ + if self.transport.server_mode: + self.transport._expect_packet(MSG_KEXGSS_GROUPREQ) + return + # request a bit range: we accept (min_bits) to (max_bits), but prefer + # (preferred_bits). according to the spec, we shouldn't pull the + # minimum up above 1024. + self.gss_host = self.transport.gss_host + m = Message() + m.add_byte(c_MSG_KEXGSS_GROUPREQ) + m.add_int(self.min_bits) + m.add_int(self.preferred_bits) + m.add_int(self.max_bits) + self.transport._send_message(m) + self.transport._expect_packet(MSG_KEXGSS_GROUP) + + def parse_next(self, ptype, m): + """ + Parse the next packet. + + :param ptype: The (string) type of the incoming packet + :param `.Message` m: The packet content + """ + if ptype == MSG_KEXGSS_GROUPREQ: + return self._parse_kexgss_groupreq(m) + elif ptype == MSG_KEXGSS_GROUP: + return self._parse_kexgss_group(m) + elif ptype == MSG_KEXGSS_INIT: + return self._parse_kexgss_gex_init(m) + elif ptype == MSG_KEXGSS_HOSTKEY: + return self._parse_kexgss_hostkey(m) + elif ptype == MSG_KEXGSS_CONTINUE: + return self._parse_kexgss_continue(m) + elif ptype == MSG_KEXGSS_COMPLETE: + return self._parse_kexgss_complete(m) + elif ptype == MSG_KEXGSS_ERROR: + return self._parse_kexgss_error(m) + msg = "KexGex asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) + + # ## internals... + + def _generate_x(self): + # generate an "x" (1 < x < (p-1)/2). + q = (self.p - 1) // 2 + qnorm = util.deflate_long(q, 0) + qhbyte = byte_ord(qnorm[0]) + byte_count = len(qnorm) + qmask = 0xFF + while not (qhbyte & 0x80): + qhbyte <<= 1 + qmask >>= 1 + while True: + x_bytes = os.urandom(byte_count) + x_bytes = byte_mask(x_bytes[0], qmask) + x_bytes[1:] + x = util.inflate_long(x_bytes, 1) + if (x > 1) and (x < q): + break + self.x = x + + def _parse_kexgss_groupreq(self, m): + """ + Parse the SSH2_MSG_KEXGSS_GROUPREQ message (server mode). 
+ + :param `.Message` m: The content of the + SSH2_MSG_KEXGSS_GROUPREQ message + """ + minbits = m.get_int() + preferredbits = m.get_int() + maxbits = m.get_int() + # smoosh the user's preferred size into our own limits + if preferredbits > self.max_bits: + preferredbits = self.max_bits + if preferredbits < self.min_bits: + preferredbits = self.min_bits + # fix min/max if they're inconsistent. technically, we could just pout + # and hang up, but there's no harm in giving them the benefit of the + # doubt and just picking a bitsize for them. + if minbits > preferredbits: + minbits = preferredbits + if maxbits < preferredbits: + maxbits = preferredbits + # now save a copy + self.min_bits = minbits + self.preferred_bits = preferredbits + self.max_bits = maxbits + # generate prime + pack = self.transport._get_modulus_pack() + if pack is None: + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, # noqa + "Picking p ({} <= {} <= {} bits)".format( + minbits, preferredbits, maxbits + ), + ) + self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits) + m = Message() + m.add_byte(c_MSG_KEXGSS_GROUP) + m.add_mpint(self.p) + m.add_mpint(self.g) + self.transport._send_message(m) + self.transport._expect_packet(MSG_KEXGSS_INIT) + + def _parse_kexgss_group(self, m): + """ + Parse the SSH2_MSG_KEXGSS_GROUP message (client mode). + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_GROUP message + """ + self.p = m.get_mpint() + self.g = m.get_mpint() + # reject if p's bit length < 1024 or > 8192 + bitlen = util.bit_length(self.p) + if (bitlen < 1024) or (bitlen > 8192): + raise SSHException( + "Server-generated gex p (don't ask) is out of range " + "({} bits)".format(bitlen) + ) + self.transport._log( + DEBUG, "Got server p ({} bits)".format(bitlen) + ) # noqa + self._generate_x() + # now compute e = g^x mod p + self.e = pow(self.g, self.x, self.p) + m = Message() + m.add_byte(c_MSG_KEXGSS_INIT) + m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host)) + m.add_mpint(self.e) + self.transport._send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_ERROR, + ) + + def _parse_kexgss_gex_init(self, m): + """ + Parse the SSH2_MSG_KEXGSS_INIT message (server mode). 
+ + :param `Message` m: The content of the SSH2_MSG_KEXGSS_INIT message + """ + client_token = m.get_string() + self.e = m.get_mpint() + if (self.e < 1) or (self.e > self.p - 1): + raise SSHException('Client kex "e" is out of range') + self._generate_x() + self.f = pow(self.g, self.x, self.p) + K = pow(self.e, self.x, self.p) + self.transport.host_key = NullHostKey() + key = self.transport.host_key.__str__() + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + key, + ) + hm.add_int(self.min_bits) + hm.add_int(self.preferred_bits) + hm.add_int(self.max_bits) + hm.add_mpint(self.p) + hm.add_mpint(self.g) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = sha1(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + srv_token = self.kexgss.ssh_accept_sec_context( + self.gss_host, client_token + ) + m = Message() + if self.kexgss._gss_srv_ctxt_status: + mic_token = self.kexgss.ssh_get_mic( + self.transport.session_id, gss_kex=True + ) + m.add_byte(c_MSG_KEXGSS_COMPLETE) + m.add_mpint(self.f) + m.add_string(mic_token) + if srv_token is not None: + m.add_boolean(True) + m.add_string(srv_token) + else: + m.add_boolean(False) + self.transport._send_message(m) + self.transport.gss_kex_used = True + self.transport._activate_outbound() + else: + m.add_byte(c_MSG_KEXGSS_CONTINUE) + m.add_string(srv_token) + self.transport._send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) + + def _parse_kexgss_hostkey(self, m): + """ + Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode). + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message + """ + # client mode + host_key = m.get_string() + self.transport.host_key = host_key + sig = m.get_string() + self.transport._verify_key(host_key, sig) + self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE) + + def _parse_kexgss_continue(self, m): + """ + Parse the SSH2_MSG_KEXGSS_CONTINUE message. + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE message + """ + if not self.transport.server_mode: + srv_token = m.get_string() + m = Message() + m.add_byte(c_MSG_KEXGSS_CONTINUE) + m.add_string( + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + ) + self.transport.send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) + else: + pass + + def _parse_kexgss_complete(self, m): + """ + Parse the SSH2_MSG_KEXGSS_COMPLETE message (client mode). + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_COMPLETE message + """ + if self.transport.host_key is None: + self.transport.host_key = NullHostKey() + self.f = m.get_mpint() + mic_token = m.get_string() + # This must be TRUE, if there is a GSS-API token in this message. 
+ bool = m.get_boolean() + srv_token = None + if bool: + srv_token = m.get_string() + if (self.f < 1) or (self.f > self.p - 1): + raise SSHException('Server kex "f" is out of range') + K = pow(self.f, self.x, self.p) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + self.transport.host_key.__str__(), + ) + if not self.old_style: + hm.add_int(self.min_bits) + hm.add_int(self.preferred_bits) + if not self.old_style: + hm.add_int(self.max_bits) + hm.add_mpint(self.p) + hm.add_mpint(self.g) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = sha1(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + if srv_token is not None: + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + self.kexgss.ssh_check_mic(mic_token, H) + else: + self.kexgss.ssh_check_mic(mic_token, H) + self.transport.gss_kex_used = True + self.transport._activate_outbound() + + def _parse_kexgss_error(self, m): + """ + Parse the SSH2_MSG_KEXGSS_ERROR message (client mode). + The server may send a GSS-API error message. if it does, we display + the error by throwing an exception (client mode). + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_ERROR message + :raise SSHException: Contains GSS-API major and minor status as well as + the error message and the language tag of the + message + """ + maj_status = m.get_int() + min_status = m.get_int() + err_msg = m.get_string() + m.get_string() # we don't care about the language (lang_tag)! + raise SSHException( + """GSS-API Error: +Major Status: {} +Minor Status: {} +Error Message: {} +""".format( + maj_status, min_status, err_msg + ) + ) + + +class NullHostKey: + """ + This class represents the Null Host Key for GSS-API Key Exchange as defined + in `RFC 4462 Section 5 + `_ + """ + + def __init__(self): + self.key = "" + + def __str__(self): + return self.key + + def get_name(self): + return self.key diff --git a/.venv/lib/python3.9/site-packages/paramiko/message.py b/.venv/lib/python3.9/site-packages/paramiko/message.py new file mode 100644 index 0000000..8c2b3bd --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/message.py @@ -0,0 +1,318 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Implementation of an SSH2 "message". 
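As a quick round-trip illustration of the encoding this class implements, a standalone sketch that assumes only the public API defined below:

    from paramiko.message import Message

    m = Message()
    m.add_byte(b"\x14")            # one raw byte (e.g. a packet type)
    m.add_string("ssh-ed25519")    # uint32 length prefix + bytes
    m.add_int(42)                  # 32-bit unsigned integer
    m.add_mpint(2 ** 130 + 7)      # arbitrary-precision integer

    parsed = Message(m.asbytes())  # re-parse the serialized byte stream
    assert parsed.get_byte() == b"\x14"
    assert parsed.get_text() == "ssh-ed25519"
    assert parsed.get_int() == 42
    assert parsed.get_mpint() == 2 ** 130 + 7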
+""" + +import struct +from io import BytesIO + +from paramiko import util +from paramiko.common import zero_byte, max_byte, one_byte +from paramiko.util import u + + +class Message: + """ + An SSH2 message is a stream of bytes that encodes some combination of + strings, integers, bools, and infinite-precision integers. This class + builds or breaks down such a byte stream. + + Normally you don't need to deal with anything this low-level, but it's + exposed for people implementing custom extensions, or features that + paramiko doesn't support yet. + """ + + big_int = 0xFF000000 + + def __init__(self, content=None): + """ + Create a new SSH2 message. + + :param bytes content: + the byte stream to use as the message content (passed in only when + decomposing a message). + """ + if content is not None: + self.packet = BytesIO(content) + else: + self.packet = BytesIO() + + def __bytes__(self): + return self.asbytes() + + def __repr__(self): + """ + Returns a string representation of this object, for debugging. + """ + return "paramiko.Message(" + repr(self.packet.getvalue()) + ")" + + # TODO 4.0: just merge into __bytes__ (everywhere) + def asbytes(self): + """ + Return the byte stream content of this Message, as a `bytes`. + """ + return self.packet.getvalue() + + def rewind(self): + """ + Rewind the message to the beginning as if no items had been parsed + out of it yet. + """ + self.packet.seek(0) + + def get_remainder(self): + """ + Return the `bytes` of this message that haven't already been parsed and + returned. + """ + position = self.packet.tell() + remainder = self.packet.read() + self.packet.seek(position) + return remainder + + def get_so_far(self): + """ + Returns the `bytes` of this message that have been parsed and + returned. The string passed into a message's constructor can be + regenerated by concatenating ``get_so_far`` and `get_remainder`. + """ + position = self.packet.tell() + self.rewind() + return self.packet.read(position) + + def get_bytes(self, n): + """ + Return the next ``n`` bytes of the message, without decomposing into an + int, decoded string, etc. Just the raw bytes are returned. Returns a + string of ``n`` zero bytes if there weren't ``n`` bytes remaining in + the message. + """ + b = self.packet.read(n) + max_pad_size = 1 << 20 # Limit padding to 1 MB + if len(b) < n < max_pad_size: + return b + zero_byte * (n - len(b)) + return b + + def get_byte(self): + """ + Return the next byte of the message, without decomposing it. This + is equivalent to `get_bytes(1) `. + + :return: + the next (`bytes`) byte of the message, or ``b'\000'`` if there + aren't any bytes remaining. + """ + return self.get_bytes(1) + + def get_boolean(self): + """ + Fetch a boolean from the stream. + """ + b = self.get_bytes(1) + return b != zero_byte + + def get_adaptive_int(self): + """ + Fetch an int from the stream. + + :return: a 32-bit unsigned `int`. + """ + byte = self.get_bytes(1) + if byte == max_byte: + return util.inflate_long(self.get_binary()) + byte += self.get_bytes(3) + return struct.unpack(">I", byte)[0] + + def get_int(self): + """ + Fetch an int from the stream. + """ + return struct.unpack(">I", self.get_bytes(4))[0] + + def get_int64(self): + """ + Fetch a 64-bit int from the stream. + + :return: a 64-bit unsigned integer (`int`). + """ + return struct.unpack(">Q", self.get_bytes(8))[0] + + def get_mpint(self): + """ + Fetch a long int (mpint) from the stream. + + :return: an arbitrary-length integer (`int`). 
+ """ + return util.inflate_long(self.get_binary()) + + # TODO 4.0: depending on where this is used internally or downstream, force + # users to specify get_binary instead and delete this. + def get_string(self): + """ + Fetch a "string" from the stream. This will actually be a `bytes` + object, and may contain unprintable characters. (It's not unheard of + for a string to contain another byte-stream message.) + """ + return self.get_bytes(self.get_int()) + + # TODO 4.0: also consider having this take over the get_string name, and + # remove this name instead. + def get_text(self): + """ + Fetch a Unicode string from the stream. + + This currently operates by attempting to encode the next "string" as + ``utf-8``. + """ + return u(self.get_string()) + + def get_binary(self): + """ + Alias for `get_string` (obtains a bytestring). + """ + return self.get_bytes(self.get_int()) + + def get_list(self): + """ + Fetch a list of `strings ` from the stream. + + These are trivially encoded as comma-separated values in a string. + """ + return self.get_text().split(",") + + def add_bytes(self, b): + """ + Write bytes to the stream, without any formatting. + + :param bytes b: bytes to add + """ + self.packet.write(b) + return self + + def add_byte(self, b): + """ + Write a single byte to the stream, without any formatting. + + :param bytes b: byte to add + """ + self.packet.write(b) + return self + + def add_boolean(self, b): + """ + Add a boolean value to the stream. + + :param bool b: boolean value to add + """ + if b: + self.packet.write(one_byte) + else: + self.packet.write(zero_byte) + return self + + def add_int(self, n): + """ + Add an integer to the stream. + + :param int n: integer to add + """ + self.packet.write(struct.pack(">I", n)) + return self + + def add_adaptive_int(self, n): + """ + Add an integer to the stream. + + :param int n: integer to add + """ + if n >= Message.big_int: + self.packet.write(max_byte) + self.add_string(util.deflate_long(n)) + else: + self.packet.write(struct.pack(">I", n)) + return self + + def add_int64(self, n): + """ + Add a 64-bit int to the stream. + + :param int n: long int to add + """ + self.packet.write(struct.pack(">Q", n)) + return self + + def add_mpint(self, z): + """ + Add a long int to the stream, encoded as an infinite-precision + integer. This method only works on positive numbers. + + :param int z: long int to add + """ + self.add_string(util.deflate_long(z)) + return self + + # TODO: see the TODO for get_string/get_text/et al, this should change + # to match. + def add_string(self, s): + """ + Add a bytestring to the stream. + + :param byte s: bytestring to add + """ + s = util.asbytes(s) + self.add_int(len(s)) + self.packet.write(s) + return self + + def add_list(self, l): # noqa: E741 + """ + Add a list of strings to the stream. They are encoded identically to + a single string of values separated by commas. (Yes, really, that's + how SSH2 does it.) + + :param l: list of strings to add + """ + self.add_string(",".join(l)) + return self + + def _add(self, i): + if type(i) is bool: + return self.add_boolean(i) + elif isinstance(i, int): + return self.add_adaptive_int(i) + elif type(i) is list: + return self.add_list(i) + else: + return self.add_string(i) + + # TODO: this would never have worked for unicode strings under Python 3, + # guessing nobody/nothing ever used it for that purpose? + def add(self, *seq): + """ + Add a sequence of items to the stream. The values are encoded based + on their type: bytes, str, int, bool, or list. + + .. 
warning:: + Longs are encoded non-deterministically. Don't use this method. + + :param seq: the sequence of items + """ + for item in seq: + self._add(item) diff --git a/.venv/lib/python3.9/site-packages/paramiko/packet.py b/.venv/lib/python3.9/site-packages/paramiko/packet.py new file mode 100644 index 0000000..f1de4b0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/packet.py @@ -0,0 +1,696 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Packet handling +""" + +import errno +import os +import socket +import struct +import threading +import time +from hmac import HMAC + +from paramiko import util +from paramiko.common import ( + linefeed_byte, + cr_byte_value, + MSG_NAMES, + DEBUG, + xffffffff, + zero_byte, + byte_ord, +) +from paramiko.util import u +from paramiko.ssh_exception import SSHException, ProxyCommandFailure +from paramiko.message import Message + + +def compute_hmac(key, message, digest_class): + return HMAC(key, message, digest_class).digest() + + +class NeedRekeyException(Exception): + """ + Exception indicating a rekey is needed. + """ + + pass + + +def first_arg(e): + arg = None + if type(e.args) is tuple and len(e.args) > 0: + arg = e.args[0] + return arg + + +class Packetizer: + """ + Implementation of the base SSH packet protocol. + """ + + # READ the secsh RFC's before raising these values. if anything, + # they should probably be lower. 
+ REKEY_PACKETS = pow(2, 29) + REKEY_BYTES = pow(2, 29) + + # Allow receiving this many packets after a re-key request before + # terminating + REKEY_PACKETS_OVERFLOW_MAX = pow(2, 29) + # Allow receiving this many bytes after a re-key request before terminating + REKEY_BYTES_OVERFLOW_MAX = pow(2, 29) + + def __init__(self, socket): + self.__socket = socket + self.__logger = None + self.__closed = False + self.__dump_packets = False + self.__need_rekey = False + self.__init_count = 0 + self.__remainder = bytes() + self._initial_kex_done = False + + # used for noticing when to re-key: + self.__sent_bytes = 0 + self.__sent_packets = 0 + self.__received_bytes = 0 + self.__received_packets = 0 + self.__received_bytes_overflow = 0 + self.__received_packets_overflow = 0 + + # current inbound/outbound ciphering: + self.__block_size_out = 8 + self.__block_size_in = 8 + self.__mac_size_out = 0 + self.__mac_size_in = 0 + self.__block_engine_out = None + self.__block_engine_in = None + self.__sdctr_out = False + self.__mac_engine_out = None + self.__mac_engine_in = None + self.__mac_key_out = bytes() + self.__mac_key_in = bytes() + self.__compress_engine_out = None + self.__compress_engine_in = None + self.__sequence_number_out = 0 + self.__sequence_number_in = 0 + self.__etm_out = False + self.__etm_in = False + + # AEAD (eg aes128-gcm/aes256-gcm) cipher use + self.__aead_out = False + self.__aead_in = False + self.__iv_out = None + self.__iv_in = None + + # lock around outbound writes (packet computation) + self.__write_lock = threading.RLock() + + # keepalives: + self.__keepalive_interval = 0 + self.__keepalive_last = time.time() + self.__keepalive_callback = None + + self.__timer = None + self.__handshake_complete = False + self.__timer_expired = False + + @property + def closed(self): + return self.__closed + + def reset_seqno_out(self): + self.__sequence_number_out = 0 + + def reset_seqno_in(self): + self.__sequence_number_in = 0 + + def set_log(self, log): + """ + Set the Python log object to use for logging. + """ + self.__logger = log + + def set_outbound_cipher( + self, + block_engine, + block_size, + mac_engine, + mac_size, + mac_key, + sdctr=False, + etm=False, + aead=False, + iv_out=None, + ): + """ + Switch outbound data cipher. + :param etm: Set encrypt-then-mac from OpenSSH + """ + self.__block_engine_out = block_engine + self.__sdctr_out = sdctr + self.__block_size_out = block_size + self.__mac_engine_out = mac_engine + self.__mac_size_out = mac_size + self.__mac_key_out = mac_key + self.__sent_bytes = 0 + self.__sent_packets = 0 + self.__etm_out = etm + self.__aead_out = aead + self.__iv_out = iv_out + # wait until the reset happens in both directions before clearing + # rekey flag + self.__init_count |= 1 + if self.__init_count == 3: + self.__init_count = 0 + self.__need_rekey = False + + def set_inbound_cipher( + self, + block_engine, + block_size, + mac_engine, + mac_size, + mac_key, + etm=False, + aead=False, + iv_in=None, + ): + """ + Switch inbound data cipher. 
+ :param etm: Set encrypt-then-mac from OpenSSH + """ + self.__block_engine_in = block_engine + self.__block_size_in = block_size + self.__mac_engine_in = mac_engine + self.__mac_size_in = mac_size + self.__mac_key_in = mac_key + self.__received_bytes = 0 + self.__received_packets = 0 + self.__received_bytes_overflow = 0 + self.__received_packets_overflow = 0 + self.__etm_in = etm + self.__aead_in = aead + self.__iv_in = iv_in + # wait until the reset happens in both directions before clearing + # rekey flag + self.__init_count |= 2 + if self.__init_count == 3: + self.__init_count = 0 + self.__need_rekey = False + + def set_outbound_compressor(self, compressor): + self.__compress_engine_out = compressor + + def set_inbound_compressor(self, compressor): + self.__compress_engine_in = compressor + + def close(self): + self.__closed = True + self.__socket.close() + + def set_hexdump(self, hexdump): + self.__dump_packets = hexdump + + def get_hexdump(self): + return self.__dump_packets + + def get_mac_size_in(self): + return self.__mac_size_in + + def get_mac_size_out(self): + return self.__mac_size_out + + def need_rekey(self): + """ + Returns ``True`` if a new set of keys needs to be negotiated. This + will be triggered during a packet read or write, so it should be + checked after every read or write, or at least after every few. + """ + return self.__need_rekey + + def set_keepalive(self, interval, callback): + """ + Turn on/off the callback keepalive. If ``interval`` seconds pass with + no data read from or written to the socket, the callback will be + executed and the timer will be reset. + """ + self.__keepalive_interval = interval + self.__keepalive_callback = callback + self.__keepalive_last = time.time() + + def read_timer(self): + self.__timer_expired = True + + def start_handshake(self, timeout): + """ + Tells `Packetizer` that the handshake process started. + Starts a book keeping timer that can signal a timeout in the + handshake process. + + :param float timeout: amount of seconds to wait before timing out + """ + if not self.__timer: + self.__timer = threading.Timer(float(timeout), self.read_timer) + self.__timer.start() + + def handshake_timed_out(self): + """ + Checks if the handshake has timed out. + + If `start_handshake` wasn't called before the call to this function, + the return value will always be `False`. If the handshake completed + before a timeout was reached, the return value will be `False` + + :return: handshake time out status, as a `bool` + """ + if not self.__timer: + return False + if self.__handshake_complete: + return False + return self.__timer_expired + + def complete_handshake(self): + """ + Tells `Packetizer` that the handshake has completed. + """ + if self.__timer: + self.__timer.cancel() + self.__timer_expired = False + self.__handshake_complete = True + + def read_all(self, n, check_rekey=False): + """ + Read as close to N bytes as possible, blocking as long as necessary. 
+ + :param int n: number of bytes to read + :return: the data read, as a `str` + + :raises: + ``EOFError`` -- if the socket was closed before all the bytes could + be read + """ + out = bytes() + # handle over-reading from reading the banner line + if len(self.__remainder) > 0: + out = self.__remainder[:n] + self.__remainder = self.__remainder[n:] + n -= len(out) + while n > 0: + got_timeout = False + if self.handshake_timed_out(): + raise EOFError() + try: + x = self.__socket.recv(n) + if len(x) == 0: + raise EOFError() + out += x + n -= len(x) + except socket.timeout: + got_timeout = True + except socket.error as e: + # on Linux, sometimes instead of socket.timeout, we get + # EAGAIN. this is a bug in recent (> 2.6.9) kernels but + # we need to work around it. + arg = first_arg(e) + if arg == errno.EAGAIN: + got_timeout = True + elif self.__closed: + raise EOFError() + else: + raise + if got_timeout: + if self.__closed: + raise EOFError() + if check_rekey and (len(out) == 0) and self.__need_rekey: + raise NeedRekeyException() + self._check_keepalive() + return out + + def write_all(self, out): + self.__keepalive_last = time.time() + iteration_with_zero_as_return_value = 0 + while len(out) > 0: + retry_write = False + try: + n = self.__socket.send(out) + except socket.timeout: + retry_write = True + except socket.error as e: + arg = first_arg(e) + if arg == errno.EAGAIN: + retry_write = True + else: + n = -1 + except ProxyCommandFailure: + raise # so it doesn't get swallowed by the below catchall + except Exception: + # could be: (32, 'Broken pipe') + n = -1 + if retry_write: + n = 0 + if self.__closed: + n = -1 + else: + if n == 0 and iteration_with_zero_as_return_value > 10: + # We shouldn't retry the write, but we didn't + # manage to send anything over the socket. This might be an + # indication that we have lost contact with the remote + # side, but are yet to receive an EOFError or other socket + # errors. Let's give it some iteration to try and catch up. + n = -1 + iteration_with_zero_as_return_value += 1 + if n < 0: + raise EOFError() + if n == len(out): + break + out = out[n:] + return + + def readline(self, timeout): + """ + Read a line from the socket. We assume no data is pending after the + line, so it's okay to attempt large reads. + """ + buf = self.__remainder + while linefeed_byte not in buf: + buf += self._read_timeout(timeout) + n = buf.index(linefeed_byte) + self.__remainder = buf[n + 1 :] + buf = buf[:n] + if (len(buf) > 0) and (buf[-1] == cr_byte_value): + buf = buf[:-1] + return u(buf) + + def _inc_iv_counter(self, iv): + # Per https://www.rfc-editor.org/rfc/rfc5647.html#section-7.1 , + # we increment the last 8 bytes of the 12-byte IV... + iv_counter_b = iv[4:] + iv_counter = int.from_bytes(iv_counter_b, "big") + inc_iv_counter = iv_counter + 1 + inc_iv_counter_b = inc_iv_counter.to_bytes(8, "big") + # ...then re-concatenate it with the static first 4 bytes + new_iv = iv[0:4] + inc_iv_counter_b + return new_iv + + def send_message(self, data): + """ + Write a block of data using the current cipher, as an SSH block. 
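The invocation-counter arithmetic in _inc_iv_counter above can be checked in isolation; the IV value below is made up:

    # Standalone check of the _inc_iv_counter arithmetic: a 12-byte GCM IV is
    # a 4-byte fixed field plus an 8-byte big-endian invocation counter that
    # is incremented once per packet (RFC 5647, section 7.1). Example IV is
    # arbitrary.
    iv = bytes.fromhex("cafebabe" + "00000000000000ff")
    fixed, counter = iv[:4], int.from_bytes(iv[4:], "big")
    new_iv = fixed + (counter + 1).to_bytes(8, "big")
    print(new_iv.hex())   # cafebabe0000000000000100 -- counter went 0xff -> 0x100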
+ """ + # encrypt this sucka + data = data.asbytes() + cmd = byte_ord(data[0]) + if cmd in MSG_NAMES: + cmd_name = MSG_NAMES[cmd] + else: + cmd_name = "${:x}".format(cmd) + orig_len = len(data) + self.__write_lock.acquire() + try: + if self.__compress_engine_out is not None: + data = self.__compress_engine_out(data) + packet = self._build_packet(data) + if self.__dump_packets: + self._log( + DEBUG, + "Write packet <{}>, length {}".format(cmd_name, orig_len), + ) + self._log(DEBUG, util.format_binary(packet, "OUT: ")) + if self.__block_engine_out is not None: + if self.__etm_out: + # packet length is not encrypted in EtM + out = packet[0:4] + self.__block_engine_out.update( + packet[4:] + ) + elif self.__aead_out: + # Packet-length field is used as the 'associated data' + # under AES-GCM, so like EtM, it's not encrypted. See + # https://www.rfc-editor.org/rfc/rfc5647#section-7.3 + out = packet[0:4] + self.__block_engine_out.encrypt( + self.__iv_out, packet[4:], packet[0:4] + ) + self.__iv_out = self._inc_iv_counter(self.__iv_out) + else: + out = self.__block_engine_out.update(packet) + else: + out = packet + # Append an MAC when needed (eg, not under AES-GCM) + if self.__block_engine_out is not None and not self.__aead_out: + packed = struct.pack(">I", self.__sequence_number_out) + payload = packed + (out if self.__etm_out else packet) + out += compute_hmac( + self.__mac_key_out, payload, self.__mac_engine_out + )[: self.__mac_size_out] + next_seq = (self.__sequence_number_out + 1) & xffffffff + if next_seq == 0 and not self._initial_kex_done: + raise SSHException( + "Sequence number rolled over during initial kex!" + ) + self.__sequence_number_out = next_seq + self.write_all(out) + + self.__sent_bytes += len(out) + self.__sent_packets += 1 + sent_too_much = ( + self.__sent_packets >= self.REKEY_PACKETS + or self.__sent_bytes >= self.REKEY_BYTES + ) + if sent_too_much and not self.__need_rekey: + # only ask once for rekeying + msg = "Rekeying (hit {} packets, {} bytes sent)" + self._log( + DEBUG, msg.format(self.__sent_packets, self.__sent_bytes) + ) + self.__received_bytes_overflow = 0 + self.__received_packets_overflow = 0 + self._trigger_rekey() + finally: + self.__write_lock.release() + + def read_message(self): + """ + Only one thread should ever be in this function (no other locking is + done). + + :raises: `.SSHException` -- if the packet is mangled + :raises: `.NeedRekeyException` -- if the transport should rekey + """ + header = self.read_all(self.__block_size_in, check_rekey=True) + if self.__etm_in: + packet_size = struct.unpack(">I", header[:4])[0] + remaining = packet_size - self.__block_size_in + 4 + packet = header[4:] + self.read_all(remaining, check_rekey=False) + mac = self.read_all(self.__mac_size_in, check_rekey=False) + mac_payload = ( + struct.pack(">II", self.__sequence_number_in, packet_size) + + packet + ) + my_mac = compute_hmac( + self.__mac_key_in, mac_payload, self.__mac_engine_in + )[: self.__mac_size_in] + if not util.constant_time_bytes_eq(my_mac, mac): + raise SSHException("Mismatched MAC") + header = packet + + if self.__aead_in: + # Grab unencrypted (considered 'additional data' under GCM) packet + # length. 
+ packet_size = struct.unpack(">I", header[:4])[0] + aad = header[:4] + remaining = ( + packet_size - self.__block_size_in + 4 + self.__mac_size_in + ) + packet = header[4:] + self.read_all(remaining, check_rekey=False) + header = self.__block_engine_in.decrypt(self.__iv_in, packet, aad) + + self.__iv_in = self._inc_iv_counter(self.__iv_in) + + if self.__block_engine_in is not None and not self.__aead_in: + header = self.__block_engine_in.update(header) + if self.__dump_packets: + self._log(DEBUG, util.format_binary(header, "IN: ")) + + # When ETM or AEAD (GCM) are in use, we've already read the packet size + # & decrypted everything, so just set the packet back to the header we + # obtained. + if self.__etm_in or self.__aead_in: + packet = header + # Otherwise, use the older non-ETM logic + else: + packet_size = struct.unpack(">I", header[:4])[0] + + # leftover contains decrypted bytes from the first block (after the + # length field) + leftover = header[4:] + if (packet_size - len(leftover)) % self.__block_size_in != 0: + raise SSHException("Invalid packet blocking") + buf = self.read_all( + packet_size + self.__mac_size_in - len(leftover) + ) + packet = buf[: packet_size - len(leftover)] + post_packet = buf[packet_size - len(leftover) :] + + if self.__block_engine_in is not None: + packet = self.__block_engine_in.update(packet) + packet = leftover + packet + + if self.__dump_packets: + self._log(DEBUG, util.format_binary(packet, "IN: ")) + + if self.__mac_size_in > 0 and not self.__etm_in and not self.__aead_in: + mac = post_packet[: self.__mac_size_in] + mac_payload = ( + struct.pack(">II", self.__sequence_number_in, packet_size) + + packet + ) + my_mac = compute_hmac( + self.__mac_key_in, mac_payload, self.__mac_engine_in + )[: self.__mac_size_in] + if not util.constant_time_bytes_eq(my_mac, mac): + raise SSHException("Mismatched MAC") + padding = byte_ord(packet[0]) + payload = packet[1 : packet_size - padding] + + if self.__dump_packets: + self._log( + DEBUG, + "Got payload ({} bytes, {} padding)".format( + packet_size, padding + ), + ) + + if self.__compress_engine_in is not None: + payload = self.__compress_engine_in(payload) + + msg = Message(payload[1:]) + msg.seqno = self.__sequence_number_in + next_seq = (self.__sequence_number_in + 1) & xffffffff + if next_seq == 0 and not self._initial_kex_done: + raise SSHException( + "Sequence number rolled over during initial kex!" 
+ ) + self.__sequence_number_in = next_seq + + # check for rekey + raw_packet_size = packet_size + self.__mac_size_in + 4 + self.__received_bytes += raw_packet_size + self.__received_packets += 1 + if self.__need_rekey: + # we've asked to rekey -- give them some packets to comply before + # dropping the connection + self.__received_bytes_overflow += raw_packet_size + self.__received_packets_overflow += 1 + if ( + self.__received_packets_overflow + >= self.REKEY_PACKETS_OVERFLOW_MAX + ) or ( + self.__received_bytes_overflow >= self.REKEY_BYTES_OVERFLOW_MAX + ): + raise SSHException( + "Remote transport is ignoring rekey requests" + ) + elif (self.__received_packets >= self.REKEY_PACKETS) or ( + self.__received_bytes >= self.REKEY_BYTES + ): + # only ask once for rekeying + err = "Rekeying (hit {} packets, {} bytes received)" + self._log( + DEBUG, + err.format(self.__received_packets, self.__received_bytes), + ) + self.__received_bytes_overflow = 0 + self.__received_packets_overflow = 0 + self._trigger_rekey() + + cmd = byte_ord(payload[0]) + if cmd in MSG_NAMES: + cmd_name = MSG_NAMES[cmd] + else: + cmd_name = "${:x}".format(cmd) + if self.__dump_packets: + self._log( + DEBUG, + "Read packet <{}>, length {}".format(cmd_name, len(payload)), + ) + return cmd, msg + + # ...protected... + + def _log(self, level, msg): + if self.__logger is None: + return + if issubclass(type(msg), list): + for m in msg: + self.__logger.log(level, m) + else: + self.__logger.log(level, msg) + + def _check_keepalive(self): + if ( + not self.__keepalive_interval + or not self.__block_engine_out + or self.__need_rekey + ): + # wait till we're encrypting, and not in the middle of rekeying + return + now = time.time() + if now > self.__keepalive_last + self.__keepalive_interval: + self.__keepalive_callback() + self.__keepalive_last = now + + def _read_timeout(self, timeout): + start = time.time() + while True: + try: + x = self.__socket.recv(128) + if len(x) == 0: + raise EOFError() + break + except socket.timeout: + pass + if self.__closed: + raise EOFError() + now = time.time() + if now - start >= timeout: + raise socket.timeout() + return x + + def _build_packet(self, payload): + # pad up at least 4 bytes, to nearest block-size (usually 8) + bsize = self.__block_size_out + # do not include payload length in computations for padding in EtM mode + # (payload length won't be encrypted) + addlen = 4 if self.__etm_out or self.__aead_out else 8 + padding = 3 + bsize - ((len(payload) + addlen) % bsize) + packet = struct.pack(">IB", len(payload) + padding + 1, padding) + packet += payload + if self.__sdctr_out or self.__block_engine_out is None: + # cute trick i caught openssh doing: if we're not encrypting or + # SDCTR mode (RFC4344), + # don't waste random bytes for the padding + packet += zero_byte * padding + else: + packet += os.urandom(padding) + return packet + + def _trigger_rekey(self): + # outside code should check for this flag + self.__need_rekey = True diff --git a/.venv/lib/python3.9/site-packages/paramiko/pipe.py b/.venv/lib/python3.9/site-packages/paramiko/pipe.py new file mode 100644 index 0000000..65944fa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/pipe.py @@ -0,0 +1,148 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. 
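A quick standalone check of the _build_packet framing at the end of packet.py above; the payload and block size are illustrative:

    import struct

    # Standalone check of the _build_packet framing: 4-byte length field,
    # 1-byte padding count, payload, then at least 4 padding bytes, with the
    # encrypted portion a multiple of the cipher block size. In classic mode
    # (addlen=8) the length field is encrypted too, so the whole packet must
    # be block-aligned; EtM/AEAD modes use addlen=4 instead.
    payload = b"x" * 13
    bsize = 16
    addlen = 8
    padding = 3 + bsize - ((len(payload) + addlen) % bsize)
    packet = struct.pack(">IB", len(payload) + padding + 1, padding) + payload
    packet += b"\x00" * padding   # random bytes when a real cipher is active
    print(len(packet) % bsize == 0, padding >= 4)   # True True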
+# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Abstraction of a one-way pipe where the read end can be used in +`select.select`. Normally this is trivial, but Windows makes it nearly +impossible. + +The pipe acts like an Event, which can be set or cleared. When set, the pipe +will trigger as readable in `select `. +""" + +import sys +import os +import socket + + +def make_pipe(): + if sys.platform[:3] != "win": + p = PosixPipe() + else: + p = WindowsPipe() + return p + + +class PosixPipe: + def __init__(self): + self._rfd, self._wfd = os.pipe() + self._set = False + self._forever = False + self._closed = False + + def close(self): + os.close(self._rfd) + os.close(self._wfd) + # used for unit tests: + self._closed = True + + def fileno(self): + return self._rfd + + def clear(self): + if not self._set or self._forever: + return + os.read(self._rfd, 1) + self._set = False + + def set(self): + if self._set or self._closed: + return + self._set = True + os.write(self._wfd, b"*") + + def set_forever(self): + self._forever = True + self.set() + + +class WindowsPipe: + """ + On Windows, only an OS-level "WinSock" may be used in select(), but reads + and writes must be to the actual socket object. + """ + + def __init__(self): + serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + serv.bind(("127.0.0.1", 0)) + serv.listen(1) + + # need to save sockets in _rsock/_wsock so they don't get closed + self._rsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self._rsock.connect(("127.0.0.1", serv.getsockname()[1])) + + self._wsock, addr = serv.accept() + serv.close() + self._set = False + self._forever = False + self._closed = False + + def close(self): + self._rsock.close() + self._wsock.close() + # used for unit tests: + self._closed = True + + def fileno(self): + return self._rsock.fileno() + + def clear(self): + if not self._set or self._forever: + return + self._rsock.recv(1) + self._set = False + + def set(self): + if self._set or self._closed: + return + self._set = True + self._wsock.send(b"*") + + def set_forever(self): + self._forever = True + self.set() + + +class OrPipe: + def __init__(self, pipe): + self._set = False + self._partner = None + self._pipe = pipe + + def set(self): + self._set = True + if not self._partner._set: + self._pipe.set() + + def clear(self): + self._set = False + if not self._partner._set: + self._pipe.clear() + + +def make_or_pipe(pipe): + """ + wraps a pipe into two pipe-like objects which are "or"d together to + affect the real pipe. if either returned pipe is set, the wrapped pipe + is set. when both are cleared, the wrapped pipe is cleared. 
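A small usage sketch for the pipe objects above, using the module as defined in this file; it assumes nothing else is reading or writing the pipe:

    import select

    from paramiko.pipe import make_pipe

    # The pipe behaves like an Event whose "set" state is visible to select():
    p = make_pipe()
    readable, _, _ = select.select([p.fileno()], [], [], 0)
    print(bool(readable))   # False: not set, nothing to read yet

    p.set()                 # writes one byte to the write end
    readable, _, _ = select.select([p.fileno()], [], [], 0)
    print(bool(readable))   # True: the read end is now selectable

    p.clear()               # consumes the byte again
    p.close()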
+ """ + p1 = OrPipe(pipe) + p2 = OrPipe(pipe) + p1._partner = p2 + p2._partner = p1 + return p1, p2 diff --git a/.venv/lib/python3.9/site-packages/paramiko/pkey.py b/.venv/lib/python3.9/site-packages/paramiko/pkey.py new file mode 100644 index 0000000..50558cb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/pkey.py @@ -0,0 +1,955 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Common API for all public keys. +""" + +import base64 +from base64 import encodebytes, decodebytes +from binascii import unhexlify +import os +from pathlib import Path +from hashlib import md5, sha256 +import re +import struct + +import bcrypt + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import padding, serialization +from cryptography.hazmat.primitives.ciphers import algorithms, modes, Cipher +from cryptography.hazmat.primitives import asymmetric + +from paramiko import util +from paramiko.util import u, b +from paramiko.common import o600 +from paramiko.ssh_exception import SSHException, PasswordRequiredException +from paramiko.message import Message + + +# TripleDES is moving from `cryptography.hazmat.primitives.ciphers.algorithms` +# in cryptography>=43.0.0 to `cryptography.hazmat.decrepit.ciphers.algorithms` +# It will be removed from `cryptography.hazmat.primitives.ciphers.algorithms` +# in cryptography==48.0.0. +# +# Source References: +# - https://github.com/pyca/cryptography/commit/722a6393e61b3ac +# - https://github.com/pyca/cryptography/pull/11407/files +try: + from cryptography.hazmat.decrepit.ciphers.algorithms import TripleDES +except ImportError: + from cryptography.hazmat.primitives.ciphers.algorithms import TripleDES + + +OPENSSH_AUTH_MAGIC = b"openssh-key-v1\x00" + + +def _unpad_openssh(data): + # At the moment, this is only used for unpadding private keys on disk. This + # really ought to be made constant time (possibly by upstreaming this logic + # into pyca/cryptography). + padding_length = data[-1] + if 0x20 <= padding_length < 0x7F: + return data # no padding, last byte part comment (printable ascii) + if padding_length > 15: + raise SSHException("Invalid key") + for i in range(padding_length): + if data[i - padding_length] != i + 1: + raise SSHException("Invalid key") + return data[:-padding_length] + + +class UnknownKeyType(Exception): + """ + An unknown public/private key algorithm was attempted to be read. + """ + + def __init__(self, key_type=None, key_bytes=None): + self.key_type = key_type + self.key_bytes = key_bytes + + def __str__(self): + return f"UnknownKeyType(type={self.key_type!r}, bytes=<{len(self.key_bytes)}>)" # noqa + + +class PKey: + """ + Base class for public keys. 
+ + Also includes some "meta" level convenience constructors such as + `.from_type_string`. + """ + + # known encryption types for private key files: + _CIPHER_TABLE = { + "AES-128-CBC": { + "cipher": algorithms.AES, + "keysize": 16, + "blocksize": 16, + "mode": modes.CBC, + }, + "AES-256-CBC": { + "cipher": algorithms.AES, + "keysize": 32, + "blocksize": 16, + "mode": modes.CBC, + }, + "DES-EDE3-CBC": { + "cipher": TripleDES, + "keysize": 24, + "blocksize": 8, + "mode": modes.CBC, + }, + } + _PRIVATE_KEY_FORMAT_ORIGINAL = 1 + _PRIVATE_KEY_FORMAT_OPENSSH = 2 + BEGIN_TAG = re.compile(r"^-{5}BEGIN (RSA|EC|OPENSSH) PRIVATE KEY-{5}\s*$") + END_TAG = re.compile(r"^-{5}END (RSA|EC|OPENSSH) PRIVATE KEY-{5}\s*$") + + @staticmethod + def from_path(path, passphrase=None): + """ + Attempt to instantiate appropriate key subclass from given file path. + + :param Path path: The path to load (may also be a `str`). + + :returns: + A `PKey` subclass instance. + + :raises: + `UnknownKeyType`, if our crypto backend doesn't know this key type. + + .. versionadded:: 3.2 + """ + # TODO: make sure sphinx is reading Path right in param list... + + # Lazy import to avoid circular import issues + from paramiko import RSAKey, Ed25519Key, ECDSAKey + + # Normalize to string, as cert suffix isn't quite an extension, so + # pathlib isn't useful for this. + path = str(path) + + # Sort out cert vs key, i.e. it is 'legal' to hand this kind of API + # /either/ the key /or/ the cert, when there is a key/cert pair. + cert_suffix = "-cert.pub" + if str(path).endswith(cert_suffix): + key_path = path[: -len(cert_suffix)] + cert_path = path + else: + key_path = path + cert_path = path + cert_suffix + + key_path = Path(key_path).expanduser() + cert_path = Path(cert_path).expanduser() + + data = key_path.read_bytes() + # Like OpenSSH, try modern/OpenSSH-specific key load first + try: + loaded = serialization.load_ssh_private_key( + data=data, password=passphrase + ) + # Then fall back to assuming legacy PEM type + except ValueError: + loaded = serialization.load_pem_private_key( + data=data, password=passphrase + ) + # TODO Python 3.10: match statement? (NOTE: we cannot use a dict + # because the results from the loader are literal backend, eg openssl, + # private classes, so isinstance tests work but exact 'x class is y' + # tests will not work) + # TODO: leverage already-parsed/math'd obj to avoid duplicate cpu + # cycles? seemingly requires most of our key subclasses to be rewritten + # to be cryptography-object-forward. this is still likely faster than + # the old SSHClient code that just tried instantiating every class! + key_class = None + if isinstance(loaded, asymmetric.rsa.RSAPrivateKey): + key_class = RSAKey + elif isinstance(loaded, asymmetric.ed25519.Ed25519PrivateKey): + key_class = Ed25519Key + elif isinstance(loaded, asymmetric.ec.EllipticCurvePrivateKey): + key_class = ECDSAKey + else: + raise UnknownKeyType(key_bytes=data, key_type=loaded.__class__) + with key_path.open() as fd: + key = key_class.from_private_key(fd, password=passphrase) + if cert_path.exists(): + # load_certificate can take Message, path-str, or value-str + key.load_certificate(str(cert_path)) + return key + + @staticmethod + def from_type_string(key_type, key_bytes): + """ + Given type `str` & raw `bytes`, return a `PKey` subclass instance. + + For example, ``PKey.from_type_string("ssh-ed25519", )`` + will (if successful) return a new `.Ed25519Key`. + + :param str key_type: + The key type, eg ``"ssh-ed25519"``. 
+ :param bytes key_bytes: + The raw byte data forming the key material, as expected by + subclasses' ``data`` parameter. + + :returns: + A `PKey` subclass instance. + + :raises: + `UnknownKeyType`, if no registered classes knew about this type. + + .. versionadded:: 3.2 + """ + from paramiko import key_classes + + for key_class in key_classes: + if key_type in key_class.identifiers(): + # TODO: needs to passthru things like passphrase + return key_class(data=key_bytes) + raise UnknownKeyType(key_type=key_type, key_bytes=key_bytes) + + @classmethod + def identifiers(cls): + """ + returns an iterable of key format/name strings this class can handle. + + Most classes only have a single identifier, and thus this default + implementation suffices; see `.ECDSAKey` for one example of an + override. + """ + return [cls.name] + + # TODO 4.0: make this and subclasses consistent, some of our own + # classmethods even assume kwargs we don't define! + # TODO 4.0: prob also raise NotImplementedError instead of pass'ing; the + # contract is pretty obviously that you need to handle msg/data/filename + # appropriately. (If 'pass' is a concession to testing, see about doing the + # work to fix the tests instead) + def __init__(self, msg=None, data=None): + """ + Create a new instance of this public key type. If ``msg`` is given, + the key's public part(s) will be filled in from the message. If + ``data`` is given, the key's public part(s) will be filled in from + the string. + + :param .Message msg: + an optional SSH `.Message` containing a public key of this type. + :param bytes data: + optional, the bytes of a public key of this type + + :raises: `.SSHException` -- + if a key cannot be created from the ``data`` or ``msg`` given, or + no key was passed in. + """ + pass + + # TODO: arguably this might want to be __str__ instead? ehh + # TODO: ditto the interplay between showing class name (currently we just + # say PKey writ large) and algorithm (usually == class name, but not + # always, also sometimes shows certificate-ness) + # TODO: if we do change it, we also want to tweak eg AgentKey, as it + # currently displays agent-ness with a suffix + def __repr__(self): + comment = "" + # Works for AgentKey, may work for others? + if hasattr(self, "comment") and self.comment: + comment = f", comment={self.comment!r}" + return f"PKey(alg={self.algorithm_name}, bits={self.get_bits()}, fp={self.fingerprint}{comment})" # noqa + + # TODO 4.0: just merge into __bytes__ (everywhere) + def asbytes(self): + """ + Return a string of an SSH `.Message` made up of the public part(s) of + this key. This string is suitable for passing to `__init__` to + re-create the key object later. + """ + return bytes() + + def __bytes__(self): + return self.asbytes() + + def __eq__(self, other): + return isinstance(other, PKey) and self._fields == other._fields + + def __hash__(self): + return hash(self._fields) + + @property + def _fields(self): + raise NotImplementedError + + def get_name(self): + """ + Return the name of this private key implementation. + + :return: + name of this private key type, in SSH terminology, as a `str` (for + example, ``"ssh-rsa"``). + """ + return "" + + @property + def algorithm_name(self): + """ + Return the key algorithm identifier for this key. + + Similar to `get_name`, but aimed at pure algorithm name instead of SSH + protocol field value. 
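A hedged usage sketch for the two convenience constructors above; the key path is an assumption about your environment and the key is assumed to be unencrypted:

    from paramiko.pkey import PKey

    # Usage sketch for PKey.from_path / PKey.from_type_string. Assumes an
    # existing, unencrypted key at ~/.ssh/id_ed25519; any on-disk key (with
    # an optional -cert.pub beside it) works the same way.
    key = PKey.from_path("~/.ssh/id_ed25519")
    print(key.algorithm_name, key.get_bits())
    print(key.fingerprint)                    # "SHA256:..." form

    # from_type_string goes the other direction: wire-format type + bytes.
    clone = PKey.from_type_string(key.get_name(), key.asbytes())
    print(clone.fingerprint == key.fingerprint)   # True: same public material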
+ """ + # Nuke the leading 'ssh-' + # TODO in Python 3.9: use .removeprefix() + name = self.get_name().replace("ssh-", "") + # Trim any cert suffix (but leave the -cert, as OpenSSH does) + cert_tail = "-cert-v01@openssh.com" + if cert_tail in name: + name = name.replace(cert_tail, "-cert") + # Nuke any eg ECDSA suffix, OpenSSH does basically this too. + else: + name = name.split("-")[0] + return name.upper() + + def get_bits(self): + """ + Return the number of significant bits in this key. This is useful + for judging the relative security of a key. + + :return: bits in the key (as an `int`) + """ + # TODO 4.0: raise NotImplementedError, 0 is unlikely to ever be + # _correct_ and nothing in the critical path seems to use this. + return 0 + + def can_sign(self): + """ + Return ``True`` if this key has the private part necessary for signing + data. + """ + return False + + def get_fingerprint(self): + """ + Return an MD5 fingerprint of the public part of this key. Nothing + secret is revealed. + + :return: + a 16-byte `string ` (binary) of the MD5 fingerprint, in SSH + format. + """ + return md5(self.asbytes()).digest() + + @property + def fingerprint(self): + """ + Modern fingerprint property designed to be comparable to OpenSSH. + + Currently only does SHA256 (the OpenSSH default). + + .. versionadded:: 3.2 + """ + hashy = sha256(bytes(self)) + hash_name = hashy.name.upper() + b64ed = encodebytes(hashy.digest()) + cleaned = u(b64ed).strip().rstrip("=") # yes, OpenSSH does this too! + return f"{hash_name}:{cleaned}" + + def get_base64(self): + """ + Return a base64 string containing the public part of this key. Nothing + secret is revealed. This format is compatible with that used to store + public key files or recognized host keys. + + :return: a base64 `string ` containing the public part of the key. + """ + return u(encodebytes(self.asbytes())).replace("\n", "") + + def sign_ssh_data(self, data, algorithm=None): + """ + Sign a blob of data with this private key, and return a `.Message` + representing an SSH signature message. + + :param bytes data: + the data to sign. + :param str algorithm: + the signature algorithm to use, if different from the key's + internal name. Default: ``None``. + :return: an SSH signature `message <.Message>`. + + .. versionchanged:: 2.9 + Added the ``algorithm`` kwarg. + """ + return bytes() + + def verify_ssh_sig(self, data, msg): + """ + Given a blob of data, and an SSH message representing a signature of + that data, verify that it was signed with this key. + + :param bytes data: the data that was signed. + :param .Message msg: an SSH signature message + :return: + ``True`` if the signature verifies correctly; ``False`` otherwise. + """ + return False + + @classmethod + def from_private_key_file(cls, filename, password=None): + """ + Create a key object by reading a private key file. If the private + key is encrypted and ``password`` is not ``None``, the given password + will be used to decrypt the key (otherwise `.PasswordRequiredException` + is thrown). Through the magic of Python, this factory method will + exist in all subclasses of PKey (such as `.RSAKey`), but + is useless on the abstract PKey class. 
+ + :param str filename: name of the file to read + :param str password: + an optional password to use to decrypt the key file, if it's + encrypted + :return: a new `.PKey` based on the given private key + + :raises: ``IOError`` -- if there was an error reading the file + :raises: `.PasswordRequiredException` -- if the private key file is + encrypted, and ``password`` is ``None`` + :raises: `.SSHException` -- if the key file is invalid + """ + key = cls(filename=filename, password=password) + return key + + @classmethod + def from_private_key(cls, file_obj, password=None): + """ + Create a key object by reading a private key from a file (or file-like) + object. If the private key is encrypted and ``password`` is not + ``None``, the given password will be used to decrypt the key (otherwise + `.PasswordRequiredException` is thrown). + + :param file_obj: the file-like object to read from + :param str password: + an optional password to use to decrypt the key, if it's encrypted + :return: a new `.PKey` based on the given private key + + :raises: ``IOError`` -- if there was an error reading the key + :raises: `.PasswordRequiredException` -- + if the private key file is encrypted, and ``password`` is ``None`` + :raises: `.SSHException` -- if the key file is invalid + """ + key = cls(file_obj=file_obj, password=password) + return key + + def write_private_key_file(self, filename, password=None): + """ + Write private key contents into a file. If the password is not + ``None``, the key is encrypted before writing. + + :param str filename: name of the file to write + :param str password: + an optional password to use to encrypt the key file + + :raises: ``IOError`` -- if there was an error writing the file + :raises: `.SSHException` -- if the key is invalid + """ + raise Exception("Not implemented in PKey") + + def write_private_key(self, file_obj, password=None): + """ + Write private key contents into a file (or file-like) object. If the + password is not ``None``, the key is encrypted before writing. + + :param file_obj: the file-like object to write into + :param str password: an optional password to use to encrypt the key + + :raises: ``IOError`` -- if there was an error writing to the file + :raises: `.SSHException` -- if the key is invalid + """ + # TODO 4.0: NotImplementedError (plus everywhere else in here) + raise Exception("Not implemented in PKey") + + def _read_private_key_file(self, tag, filename, password=None): + """ + Read an SSH2-format private key file, looking for a string of the type + ``"BEGIN xxx PRIVATE KEY"`` for some ``xxx``, base64-decode the text we + find, and return it as a string. If the private key is encrypted and + ``password`` is not ``None``, the given password will be used to + decrypt the key (otherwise `.PasswordRequiredException` is thrown). + + :param str tag: + ``"RSA"`` (or etc), the tag used to mark the data block. + :param str filename: + name of the file to read. + :param str password: + an optional password to use to decrypt the key file, if it's + encrypted. + :return: + the `bytes` that make up the private key. + + :raises: ``IOError`` -- if there was an error reading the file. + :raises: `.PasswordRequiredException` -- if the private key file is + encrypted, and ``password`` is ``None``. + :raises: `.SSHException` -- if the key file is invalid. 
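A round-trip sketch for the write_private_key_file / from_private_key_file pair documented above; the temporary path and key size are arbitrary, and passing password= to both calls exercises the encrypted path instead:

    import os
    import tempfile

    from paramiko.rsakey import RSAKey

    # Round-trip sketch: write a private key out, read it back. Uses RSAKey
    # (defined later in this diff); the path and key size are arbitrary.
    key = RSAKey.generate(2048)
    path = os.path.join(tempfile.mkdtemp(), "demo_rsa")
    key.write_private_key_file(path)                 # add password="..." to encrypt
    again = RSAKey.from_private_key_file(path)       # password="..." if encrypted
    print(again.get_base64() == key.get_base64())    # True: same public half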
+ """ + with open(filename, "r") as f: + data = self._read_private_key(tag, f, password) + return data + + def _read_private_key(self, tag, f, password=None): + lines = f.readlines() + if not lines: + raise SSHException("no lines in {} private key file".format(tag)) + + # find the BEGIN tag + start = 0 + m = self.BEGIN_TAG.match(lines[start]) + line_range = len(lines) - 1 + while start < line_range and not m: + start += 1 + m = self.BEGIN_TAG.match(lines[start]) + start += 1 + keytype = m.group(1) if m else None + if start >= len(lines) or keytype is None: + raise SSHException("not a valid {} private key file".format(tag)) + + # find the END tag + end = start + m = self.END_TAG.match(lines[end]) + while end < line_range and not m: + end += 1 + m = self.END_TAG.match(lines[end]) + + if keytype == tag: + data = self._read_private_key_pem(lines, end, password) + pkformat = self._PRIVATE_KEY_FORMAT_ORIGINAL + elif keytype == "OPENSSH": + data = self._read_private_key_openssh(lines[start:end], password) + pkformat = self._PRIVATE_KEY_FORMAT_OPENSSH + else: + raise SSHException( + "encountered {} key, expected {} key".format(keytype, tag) + ) + + return pkformat, data + + def _got_bad_key_format_id(self, id_): + err = "{}._read_private_key() spat out an unknown key format id '{}'" + raise SSHException(err.format(self.__class__.__name__, id_)) + + def _read_private_key_pem(self, lines, end, password): + start = 0 + # parse any headers first + headers = {} + start += 1 + while start < len(lines): + line = lines[start].split(": ") + if len(line) == 1: + break + headers[line[0].lower()] = line[1].strip() + start += 1 + # if we trudged to the end of the file, just try to cope. + try: + data = decodebytes(b("".join(lines[start:end]))) + except base64.binascii.Error as e: + raise SSHException("base64 decoding error: {}".format(e)) + if "proc-type" not in headers: + # unencryped: done + return data + # encrypted keyfile: will need a password + proc_type = headers["proc-type"] + if proc_type != "4,ENCRYPTED": + raise SSHException( + 'Unknown private key structure "{}"'.format(proc_type) + ) + try: + encryption_type, saltstr = headers["dek-info"].split(",") + except: + raise SSHException("Can't parse DEK-info in private key file") + if encryption_type not in self._CIPHER_TABLE: + raise SSHException( + 'Unknown private key cipher "{}"'.format(encryption_type) + ) + # if no password was passed in, + # raise an exception pointing out that we need one + if password is None: + raise PasswordRequiredException("Private key file is encrypted") + cipher = self._CIPHER_TABLE[encryption_type]["cipher"] + keysize = self._CIPHER_TABLE[encryption_type]["keysize"] + mode = self._CIPHER_TABLE[encryption_type]["mode"] + salt = unhexlify(b(saltstr)) + key = util.generate_key_bytes(md5, salt, password, keysize) + decryptor = Cipher( + cipher(key), mode(salt), backend=default_backend() + ).decryptor() + decrypted_data = decryptor.update(data) + decryptor.finalize() + unpadder = padding.PKCS7(cipher.block_size).unpadder() + try: + return unpadder.update(decrypted_data) + unpadder.finalize() + except ValueError: + raise SSHException("Bad password or corrupt private key file") + + def _read_private_key_openssh(self, lines, password): + """ + Read the new OpenSSH SSH2 private key format available + since OpenSSH version 6.5 + Reference: + https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key + """ + try: + data = decodebytes(b("".join(lines))) + except base64.binascii.Error as e: + raise SSHException("base64 
decoding error: {}".format(e)) + + # read data struct + auth_magic = data[:15] + if auth_magic != OPENSSH_AUTH_MAGIC: + raise SSHException("unexpected OpenSSH key header encountered") + + cstruct = self._uint32_cstruct_unpack(data[15:], "sssur") + cipher, kdfname, kdf_options, num_pubkeys, remainder = cstruct + # For now, just support 1 key. + if num_pubkeys > 1: + raise SSHException( + "unsupported: private keyfile has multiple keys" + ) + pubkey, privkey_blob = self._uint32_cstruct_unpack(remainder, "ss") + + if kdfname == b("bcrypt"): + if cipher == b("aes256-cbc"): + mode = modes.CBC + elif cipher == b("aes256-ctr"): + mode = modes.CTR + else: + raise SSHException( + "unknown cipher `{}` used in private key file".format( + cipher.decode("utf-8") + ) + ) + # Encrypted private key. + # If no password was passed in, raise an exception pointing + # out that we need one + if password is None: + raise PasswordRequiredException( + "private key file is encrypted" + ) + + # Unpack salt and rounds from kdfoptions + salt, rounds = self._uint32_cstruct_unpack(kdf_options, "su") + + # run bcrypt kdf to derive key and iv/nonce (32 + 16 bytes) + key_iv = bcrypt.kdf( + b(password), + b(salt), + 48, + rounds, + # We can't control how many rounds are on disk, so no sense + # warning about it. + ignore_few_rounds=True, + ) + key = key_iv[:32] + iv = key_iv[32:] + + # decrypt private key blob + decryptor = Cipher( + algorithms.AES(key), mode(iv), default_backend() + ).decryptor() + decrypted_privkey = decryptor.update(privkey_blob) + decrypted_privkey += decryptor.finalize() + elif cipher == b("none") and kdfname == b("none"): + # Unencrypted private key + decrypted_privkey = privkey_blob + else: + raise SSHException( + "unknown cipher or kdf used in private key file" + ) + + # Unpack private key and verify checkints + cstruct = self._uint32_cstruct_unpack(decrypted_privkey, "uusr") + checkint1, checkint2, keytype, keydata = cstruct + + if checkint1 != checkint2: + raise SSHException( + "OpenSSH private key file checkints do not match" + ) + + return _unpad_openssh(keydata) + + def _uint32_cstruct_unpack(self, data, strformat): + """ + Used to read new OpenSSH private key format. + Unpacks a c data structure containing a mix of 32-bit uints and + variable length strings prefixed by 32-bit uint size field, + according to the specified format. Returns the unpacked vars + in a tuple. + Format strings: + s - denotes a string + i - denotes a long integer, encoded as a byte string + u - denotes a 32-bit unsigned integer + r - the remainder of the input string, returned as a string + """ + arr = [] + idx = 0 + try: + for f in strformat: + if f == "s": + # string + s_size = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + s = data[idx : idx + s_size] + idx += s_size + arr.append(s) + if f == "i": + # long integer + s_size = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + s = data[idx : idx + s_size] + idx += s_size + i = util.inflate_long(s, True) + arr.append(i) + elif f == "u": + # 32-bit unsigned int + u = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + arr.append(u) + elif f == "r": + # remainder as string + s = data[idx:] + arr.append(s) + break + except Exception as e: + # PKey-consuming code frequently wants to save-and-skip-over issues + # with loading keys, and uses SSHException as the (really friggin + # awful) signal for this. So for now...we do this. 
+ raise SSHException(str(e)) + return tuple(arr) + + def _write_private_key_file(self, filename, key, format, password=None): + """ + Write an SSH2-format private key file in a form that can be read by + paramiko or openssh. If no password is given, the key is written in + a trivially-encoded format (base64) which is completely insecure. If + a password is given, DES-EDE3-CBC is used. + + :param str tag: + ``"RSA"`` or etc, the tag used to mark the data block. + :param filename: name of the file to write. + :param bytes data: data blob that makes up the private key. + :param str password: an optional password to use to encrypt the file. + + :raises: ``IOError`` -- if there was an error writing the file. + """ + # Ensure that we create new key files directly with a user-only mode, + # instead of opening, writing, then chmodding, which leaves us open to + # CVE-2022-24302. + with os.fdopen( + os.open( + filename, + # NOTE: O_TRUNC is a noop on new files, and O_CREAT is a noop + # on existing files, so using all 3 in both cases is fine. + flags=os.O_WRONLY | os.O_TRUNC | os.O_CREAT, + # Ditto the use of the 'mode' argument; it should be safe to + # give even for existing files (though it will not act like a + # chmod in that case). + mode=o600, + ), + # Yea, you still gotta inform the FLO that it is in "write" mode. + "w", + ) as f: + self._write_private_key(f, key, format, password=password) + + def _write_private_key(self, f, key, format, password=None): + if password is None: + encryption = serialization.NoEncryption() + else: + encryption = serialization.BestAvailableEncryption(b(password)) + + f.write( + key.private_bytes( + serialization.Encoding.PEM, format, encryption + ).decode() + ) + + def _check_type_and_load_cert(self, msg, key_type, cert_type): + """ + Perform message type-checking & optional certificate loading. + + This includes fast-forwarding cert ``msg`` objects past the nonce, so + that the subsequent fields are the key numbers; thus the caller may + expect to treat the message as key material afterwards either way. + + The obtained key type is returned for classes which need to know what + it was (e.g. ECDSA.) + """ + # Normalization; most classes have a single key type and give a string, + # but eg ECDSA is a 1:N mapping. + key_types = key_type + cert_types = cert_type + if isinstance(key_type, str): + key_types = [key_types] + if isinstance(cert_types, str): + cert_types = [cert_types] + # Can't do much with no message, that should've been handled elsewhere + if msg is None: + raise SSHException("Key object may not be empty") + # First field is always key type, in either kind of object. (make sure + # we rewind before grabbing it - sometimes caller had to do their own + # introspection first!) + msg.rewind() + type_ = msg.get_text() + # Regular public key - nothing special to do besides the implicit + # type check. + if type_ in key_types: + pass + # OpenSSH-compatible certificate - store full copy as .public_blob + # (so signing works correctly) and then fast-forward past the + # nonce. + elif type_ in cert_types: + # This seems the cleanest way to 'clone' an already-being-read + # message; they're *IO objects at heart and their .getvalue() + # always returns the full value regardless of pointer position. + self.load_certificate(Message(msg.asbytes())) + # Read out nonce as it comes before the public numbers - our caller + # is likely going to use the (only borrowed by us, not owned) + # 'msg' object for loading those numbers right after this. 
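The create-with-restrictive-permissions pattern in _write_private_key_file above generalizes to any secret written to disk. A standalone sketch; the path is arbitrary and the 0o600 result assumes a typical umask:

    import os
    import stat
    import tempfile

    # Standalone sketch of the pattern used by _write_private_key_file above:
    # create the file with mode 0o600 from the start, instead of
    # open/write/chmod, which leaves a window where the file is readable by
    # others. Path is arbitrary; result assumes a typical umask.
    path = os.path.join(tempfile.mkdtemp(), "secret.pem")
    fd = os.open(path, os.O_WRONLY | os.O_TRUNC | os.O_CREAT, 0o600)
    with os.fdopen(fd, "w") as f:
        f.write("-----BEGIN EXAMPLE-----\n")
    print(oct(stat.S_IMODE(os.stat(path).st_mode)))   # 0o600 on a fresh file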
+ # TODO: usefully interpret it & other non-public-number fields + # (requires going back into per-type subclasses.) + msg.get_string() + else: + err = "Invalid key (class: {}, data type: {}" + raise SSHException(err.format(self.__class__.__name__, type_)) + + def load_certificate(self, value): + """ + Supplement the private key contents with data loaded from an OpenSSH + public key (``.pub``) or certificate (``-cert.pub``) file, a string + containing such a file, or a `.Message` object. + + The .pub contents adds no real value, since the private key + file includes sufficient information to derive the public + key info. For certificates, however, this can be used on + the client side to offer authentication requests to the server + based on certificate instead of raw public key. + + See: + https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.certkeys + + Note: very little effort is made to validate the certificate contents, + that is for the server to decide if it is good enough to authenticate + successfully. + """ + if isinstance(value, Message): + constructor = "from_message" + elif os.path.isfile(value): + constructor = "from_file" + else: + constructor = "from_string" + blob = getattr(PublicBlob, constructor)(value) + if not blob.key_type.startswith(self.get_name()): + err = "PublicBlob type {} incompatible with key type {}" + raise ValueError(err.format(blob.key_type, self.get_name())) + self.public_blob = blob + + +# General construct for an OpenSSH style Public Key blob +# readable from a one-line file of the format: +# [] +# Of little value in the case of standard public keys +# {ssh-rsa, ssh-ecdsa, ssh-ed25519}, but should +# provide rudimentary support for {*-cert.v01} +class PublicBlob: + """ + OpenSSH plain public key or OpenSSH signed public key (certificate). + + Tries to be as dumb as possible and barely cares about specific + per-key-type data. + + .. note:: + + Most of the time you'll want to call `from_file`, `from_string` or + `from_message` for useful instantiation, the main constructor is + basically "I should be using ``attrs`` for this." + """ + + def __init__(self, type_, blob, comment=None): + """ + Create a new public blob of given type and contents. + + :param str type_: Type indicator, eg ``ssh-rsa``. + :param bytes blob: The blob bytes themselves. + :param str comment: A comment, if one was given (e.g. file-based.) + """ + self.key_type = type_ + self.key_blob = blob + self.comment = comment + + @classmethod + def from_file(cls, filename): + """ + Create a public blob from a ``-cert.pub``-style file on disk. + """ + with open(filename) as f: + string = f.read() + return cls.from_string(string) + + @classmethod + def from_string(cls, string): + """ + Create a public blob from a ``-cert.pub``-style string. + """ + fields = string.split(None, 2) + if len(fields) < 2: + msg = "Not enough fields for public blob: {}" + raise ValueError(msg.format(fields)) + key_type = fields[0] + key_blob = decodebytes(b(fields[1])) + try: + comment = fields[2].strip() + except IndexError: + comment = None + # Verify that the blob message first (string) field matches the + # key_type + m = Message(key_blob) + blob_type = m.get_text() + if blob_type != key_type: + deets = "key type={!r}, but blob type={!r}".format( + key_type, blob_type + ) + raise ValueError("Invalid PublicBlob contents: {}".format(deets)) + # All good? All good. 
+ return cls(type_=key_type, blob=key_blob, comment=comment) + + @classmethod + def from_message(cls, message): + """ + Create a public blob from a network `.Message`. + + Specifically, a cert-bearing pubkey auth packet, because by definition + OpenSSH-style certificates 'are' their own network representation." + """ + type_ = message.get_text() + return cls(type_=type_, blob=message.asbytes()) + + def __str__(self): + ret = "{} public key/certificate".format(self.key_type) + if self.comment: + ret += "- {}".format(self.comment) + return ret + + def __eq__(self, other): + # Just piggyback on Message/BytesIO, since both of these should be one. + return self and other and self.key_blob == other.key_blob + + def __ne__(self, other): + return not self == other diff --git a/.venv/lib/python3.9/site-packages/paramiko/primes.py b/.venv/lib/python3.9/site-packages/paramiko/primes.py new file mode 100644 index 0000000..663c58e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/primes.py @@ -0,0 +1,148 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Utility functions for dealing with primes. +""" + +import os + +from paramiko import util +from paramiko.common import byte_mask +from paramiko.ssh_exception import SSHException + + +def _roll_random(n): + """returns a random # from 0 to N-1""" + bits = util.bit_length(n - 1) + byte_count = (bits + 7) // 8 + hbyte_mask = pow(2, bits % 8) - 1 + + # so here's the plan: + # we fetch as many random bits as we'd need to fit N-1, and if the + # generated number is >= N, we try again. in the worst case (N-1 is a + # power of 2), we have slightly better than 50% odds of getting one that + # fits, so i can't guarantee that this loop will ever finish, but the odds + # of it looping forever should be infinitesimal. + while True: + x = os.urandom(byte_count) + if hbyte_mask > 0: + x = byte_mask(x[0], hbyte_mask) + x[1:] + num = util.inflate_long(x, 1) + if num < n: + break + return num + + +class ModulusPack: + """ + convenience object for holding the contents of the /etc/ssh/moduli file, + on systems that have such a file. + """ + + def __init__(self): + # pack is a hash of: bits -> [ (generator, modulus) ... 
] + self.pack = {} + self.discarded = [] + + def _parse_modulus(self, line): + ( + timestamp, + mod_type, + tests, + tries, + size, + generator, + modulus, + ) = line.split() + mod_type = int(mod_type) + tests = int(tests) + tries = int(tries) + size = int(size) + generator = int(generator) + modulus = int(modulus, 16) + + # weed out primes that aren't at least: + # type 2 (meets basic structural requirements) + # test 4 (more than just a small-prime sieve) + # tries < 100 if test & 4 (at least 100 tries of miller-rabin) + if ( + mod_type < 2 + or tests < 4 + or (tests & 4 and tests < 8 and tries < 100) + ): + self.discarded.append( + (modulus, "does not meet basic requirements") + ) + return + if generator == 0: + generator = 2 + + # there's a bug in the ssh "moduli" file (yeah, i know: shock! dismay! + # call cnn!) where it understates the bit lengths of these primes by 1. + # this is okay. + bl = util.bit_length(modulus) + if (bl != size) and (bl != size + 1): + self.discarded.append( + (modulus, "incorrectly reported bit length {}".format(size)) + ) + return + if bl not in self.pack: + self.pack[bl] = [] + self.pack[bl].append((generator, modulus)) + + def read_file(self, filename): + """ + :raises IOError: passed from any file operations that fail. + """ + self.pack = {} + with open(filename, "r") as f: + for line in f: + line = line.strip() + if (len(line) == 0) or (line[0] == "#"): + continue + try: + self._parse_modulus(line) + except: + continue + + def get_modulus(self, min, prefer, max): + bitsizes = sorted(self.pack.keys()) + if len(bitsizes) == 0: + raise SSHException("no moduli available") + good = -1 + # find nearest bitsize >= preferred + for b in bitsizes: + if (b >= prefer) and (b <= max) and (b < good or good == -1): + good = b + # if that failed, find greatest bitsize >= min + if good == -1: + for b in bitsizes: + if (b >= min) and (b <= max) and (b > good): + good = b + if good == -1: + # their entire (min, max) range has no intersection with our range. + # if their range is below ours, pick the smallest. otherwise pick + # the largest. it'll be out of their range requirement either way, + # but we'll be sending them the closest one we have. + good = bitsizes[0] + if min > good: + good = bitsizes[-1] + # now pick a random modulus of this bitsize + n = _roll_random(len(self.pack[good])) + return self.pack[good][n] diff --git a/.venv/lib/python3.9/site-packages/paramiko/proxy.py b/.venv/lib/python3.9/site-packages/paramiko/proxy.py new file mode 100644 index 0000000..f7609c9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/proxy.py @@ -0,0 +1,134 @@ +# Copyright (C) 2012 Yipit, Inc +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
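Returning briefly to primes.py above, a usage sketch for ModulusPack; whether /etc/ssh/moduli exists, and where, is system-dependent:

    from paramiko.primes import ModulusPack

    # Usage sketch for ModulusPack: load the system moduli file (path and its
    # existence are system-dependent; read_file raises IOError if missing)
    # and ask for a group-exchange modulus in a preferred bit-size range.
    pack = ModulusPack()
    pack.read_file("/etc/ssh/moduli")
    generator, modulus = pack.get_modulus(2048, 4096, 8192)
    print(generator, modulus.bit_length())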
+ + +import os +import shlex +import signal +from select import select +import socket +import time + +# Try-and-ignore import so platforms w/o subprocess (eg Google App Engine) can +# still import paramiko. +subprocess, subprocess_import_error = None, None +try: + import subprocess +except ImportError as e: + subprocess_import_error = e + +from paramiko.ssh_exception import ProxyCommandFailure +from paramiko.util import ClosingContextManager + + +class ProxyCommand(ClosingContextManager): + """ + Wraps a subprocess running ProxyCommand-driven programs. + + This class implements a the socket-like interface needed by the + `.Transport` and `.Packetizer` classes. Using this class instead of a + regular socket makes it possible to talk with a Popen'd command that will + proxy traffic between the client and a server hosted in another machine. + + Instances of this class may be used as context managers. + """ + + def __init__(self, command_line): + """ + Create a new CommandProxy instance. The instance created by this + class can be passed as an argument to the `.Transport` class. + + :param str command_line: + the command that should be executed and used as the proxy. + """ + if subprocess is None: + raise subprocess_import_error + self.cmd = shlex.split(command_line) + self.process = subprocess.Popen( + self.cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=0, + ) + self.timeout = None + + def send(self, content): + """ + Write the content received from the SSH client to the standard + input of the forked command. + + :param str content: string to be sent to the forked command + """ + try: + self.process.stdin.write(content) + except IOError as e: + # There was a problem with the child process. It probably + # died and we can't proceed. The best option here is to + # raise an exception informing the user that the informed + # ProxyCommand is not working. + raise ProxyCommandFailure(" ".join(self.cmd), e.strerror) + return len(content) + + def recv(self, size): + """ + Read from the standard output of the forked program. 
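A hedged usage sketch for ProxyCommand above as the socket-like object handed to the higher-level client; the host names, user name and proxy command string are placeholders:

    import paramiko

    # Usage sketch: ProxyCommand stands in for the TCP socket, as described
    # in the class docstring above. Hosts, user and command are placeholders
    # and require a working OpenSSH client on the local machine.
    sock = paramiko.ProxyCommand("ssh -W inner-host:22 jump-host")
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect("inner-host", username="user", sock=sock)
    stdin, stdout, stderr = client.exec_command("uptime")
    print(stdout.read().decode())
    client.close()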
+ + :param int size: how many chars should be read + + :return: the string of bytes read, which may be shorter than requested + """ + try: + buffer = b"" + start = time.time() + while len(buffer) < size: + select_timeout = None + if self.timeout is not None: + elapsed = time.time() - start + if elapsed >= self.timeout: + raise socket.timeout() + select_timeout = self.timeout - elapsed + + r, w, x = select([self.process.stdout], [], [], select_timeout) + if r and r[0] == self.process.stdout: + buffer += os.read( + self.process.stdout.fileno(), size - len(buffer) + ) + return buffer + except socket.timeout: + if buffer: + # Don't raise socket.timeout, return partial result instead + return buffer + raise # socket.timeout is a subclass of IOError + except IOError as e: + raise ProxyCommandFailure(" ".join(self.cmd), e.strerror) + + def close(self): + os.kill(self.process.pid, signal.SIGTERM) + + @property + def closed(self): + return self.process.returncode is not None + + @property + def _closed(self): + # Concession to Python 3 socket-like API + return self.closed + + def settimeout(self, timeout): + self.timeout = timeout diff --git a/.venv/lib/python3.9/site-packages/paramiko/rsakey.py b/.venv/lib/python3.9/site-packages/paramiko/rsakey.py new file mode 100644 index 0000000..b7ad3ce --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/rsakey.py @@ -0,0 +1,227 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +RSA keys. +""" + +from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import rsa, padding + +from paramiko.message import Message +from paramiko.pkey import PKey +from paramiko.ssh_exception import SSHException + + +class RSAKey(PKey): + """ + Representation of an RSA key which can be used to sign and verify SSH2 + data. 
+ """ + + name = "ssh-rsa" + HASHES = { + "ssh-rsa": hashes.SHA1, + "ssh-rsa-cert-v01@openssh.com": hashes.SHA1, + "rsa-sha2-256": hashes.SHA256, + "rsa-sha2-256-cert-v01@openssh.com": hashes.SHA256, + "rsa-sha2-512": hashes.SHA512, + "rsa-sha2-512-cert-v01@openssh.com": hashes.SHA512, + } + + def __init__( + self, + msg=None, + data=None, + filename=None, + password=None, + key=None, + file_obj=None, + ): + self.key = None + self.public_blob = None + if file_obj is not None: + self._from_private_key(file_obj, password) + return + if filename is not None: + self._from_private_key_file(filename, password) + return + if (msg is None) and (data is not None): + msg = Message(data) + if key is not None: + self.key = key + else: + self._check_type_and_load_cert( + msg=msg, + # NOTE: this does NOT change when using rsa2 signatures; it's + # purely about key loading, not exchange or verification + key_type=self.name, + cert_type="ssh-rsa-cert-v01@openssh.com", + ) + self.key = rsa.RSAPublicNumbers( + e=msg.get_mpint(), n=msg.get_mpint() + ).public_key(default_backend()) + + @classmethod + def identifiers(cls): + return list(cls.HASHES.keys()) + + @property + def size(self): + return self.key.key_size + + @property + def public_numbers(self): + if isinstance(self.key, rsa.RSAPrivateKey): + return self.key.private_numbers().public_numbers + else: + return self.key.public_numbers() + + def asbytes(self): + m = Message() + m.add_string(self.name) + m.add_mpint(self.public_numbers.e) + m.add_mpint(self.public_numbers.n) + return m.asbytes() + + def __str__(self): + # NOTE: see #853 to explain some legacy behavior. + # TODO 4.0: replace with a nice clean fingerprint display or something + return self.asbytes().decode("utf8", errors="ignore") + + @property + def _fields(self): + return (self.get_name(), self.public_numbers.e, self.public_numbers.n) + + def get_name(self): + return self.name + + def get_bits(self): + return self.size + + def can_sign(self): + return isinstance(self.key, rsa.RSAPrivateKey) + + def sign_ssh_data(self, data, algorithm=None): + if algorithm is None: + algorithm = self.name + sig = self.key.sign( + data, + padding=padding.PKCS1v15(), + # HASHES being just a map from long identifier to either SHA1 or + # SHA256 - cert'ness is not truly relevant. + algorithm=self.HASHES[algorithm](), + ) + m = Message() + # And here again, cert'ness is irrelevant, so it is stripped out. + m.add_string(algorithm.replace("-cert-v01@openssh.com", "")) + m.add_string(sig) + return m + + def verify_ssh_sig(self, data, msg): + sig_algorithm = msg.get_text() + if sig_algorithm not in self.HASHES: + return False + key = self.key + if isinstance(key, rsa.RSAPrivateKey): + key = key.public_key() + + # NOTE: pad received signature with leading zeros, key.verify() + # expects a signature of key size (e.g. 
PuTTY doesn't pad) + sign = msg.get_binary() + diff = key.key_size - len(sign) * 8 + if diff > 0: + sign = b"\x00" * ((diff + 7) // 8) + sign + + try: + key.verify( + sign, data, padding.PKCS1v15(), self.HASHES[sig_algorithm]() + ) + except InvalidSignature: + return False + else: + return True + + def write_private_key_file(self, filename, password=None): + self._write_private_key_file( + filename, + self.key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + def write_private_key(self, file_obj, password=None): + self._write_private_key( + file_obj, + self.key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + @staticmethod + def generate(bits, progress_func=None): + """ + Generate a new private RSA key. This factory function can be used to + generate a new host key or authentication key. + + :param int bits: number of bits the generated key should be. + :param progress_func: Unused + :return: new `.RSAKey` private key + """ + key = rsa.generate_private_key( + public_exponent=65537, key_size=bits, backend=default_backend() + ) + return RSAKey(key=key) + + # ...internals... + + def _from_private_key_file(self, filename, password): + data = self._read_private_key_file("RSA", filename, password) + self._decode_key(data) + + def _from_private_key(self, file_obj, password): + data = self._read_private_key("RSA", file_obj, password) + self._decode_key(data) + + def _decode_key(self, data): + pkformat, data = data + if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL: + try: + key = serialization.load_der_private_key( + data, password=None, backend=default_backend() + ) + except (ValueError, TypeError, UnsupportedAlgorithm) as e: + raise SSHException(str(e)) + elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH: + n, e, d, iqmp, p, q = self._uint32_cstruct_unpack(data, "iiiiii") + public_numbers = rsa.RSAPublicNumbers(e=e, n=n) + key = rsa.RSAPrivateNumbers( + p=p, + q=q, + d=d, + dmp1=d % (p - 1), + dmq1=d % (q - 1), + iqmp=iqmp, + public_numbers=public_numbers, + ).private_key(default_backend()) + else: + self._got_bad_key_format_id(pkformat) + assert isinstance(key, rsa.RSAPrivateKey) + self.key = key diff --git a/.venv/lib/python3.9/site-packages/paramiko/server.py b/.venv/lib/python3.9/site-packages/paramiko/server.py new file mode 100644 index 0000000..6923bdf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/server.py @@ -0,0 +1,732 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +`.ServerInterface` is an interface to override for server support. 
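+
+Illustrative sketch (added for clarity; not part of the upstream docstring):
+a minimal subclass that accepts one hard-coded password and rejects
+everything else::
+
+    class DemoServer(ServerInterface):
+        def get_allowed_auths(self, username):
+            return "password"
+
+        def check_auth_password(self, username, password):
+            # the "demo"/"demo" pair is made up for illustration only
+            if (username, password) == ("demo", "demo"):
+                return AUTH_SUCCESSFUL
+            return AUTH_FAILED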
+""" + +import threading +from paramiko import util +from paramiko.common import ( + DEBUG, + ERROR, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + AUTH_FAILED, + AUTH_SUCCESSFUL, +) + + +class ServerInterface: + """ + This class defines an interface for controlling the behavior of Paramiko + in server mode. + + Methods on this class are called from Paramiko's primary thread, so you + shouldn't do too much work in them. (Certainly nothing that blocks or + sleeps.) + """ + + def check_channel_request(self, kind, chanid): + """ + Determine if a channel request of a given type will be granted, and + return ``OPEN_SUCCEEDED`` or an error code. This method is + called in server mode when the client requests a channel, after + authentication is complete. + + If you allow channel requests (and an ssh server that didn't would be + useless), you should also override some of the channel request methods + below, which are used to determine which services will be allowed on + a given channel: + + - `check_channel_pty_request` + - `check_channel_shell_request` + - `check_channel_subsystem_request` + - `check_channel_window_change_request` + - `check_channel_x11_request` + - `check_channel_forward_agent_request` + + The ``chanid`` parameter is a small number that uniquely identifies the + channel within a `.Transport`. A `.Channel` object is not created + unless this method returns ``OPEN_SUCCEEDED`` -- once a + `.Channel` object is created, you can call `.Channel.get_id` to + retrieve the channel ID. + + The return value should either be ``OPEN_SUCCEEDED`` (or + ``0``) to allow the channel request, or one of the following error + codes to reject it: + + - ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED`` + - ``OPEN_FAILED_CONNECT_FAILED`` + - ``OPEN_FAILED_UNKNOWN_CHANNEL_TYPE`` + - ``OPEN_FAILED_RESOURCE_SHORTAGE`` + + The default implementation always returns + ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``. + + :param str kind: + the kind of channel the client would like to open (usually + ``"session"``). + :param int chanid: ID of the channel + :return: an `int` success or failure code (listed above) + """ + return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED + + def get_allowed_auths(self, username): + """ + Return a list of authentication methods supported by the server. + This list is sent to clients attempting to authenticate, to inform them + of authentication methods that might be successful. + + The "list" is actually a string of comma-separated names of types of + authentication. Possible values are ``"password"``, ``"publickey"``, + and ``"none"``. + + The default implementation always returns ``"password"``. + + :param str username: the username requesting authentication. + :return: a comma-separated `str` of authentication types + """ + return "password" + + def check_auth_none(self, username): + """ + Determine if a client may open channels with no (further) + authentication. + + Return ``AUTH_FAILED`` if the client must authenticate, or + ``AUTH_SUCCESSFUL`` if it's okay for the client to not + authenticate. + + The default implementation always returns ``AUTH_FAILED``. + + :param str username: the username of the client. + :return: + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if + it succeeds. + :rtype: int + """ + return AUTH_FAILED + + def check_auth_password(self, username, password): + """ + Determine if a given username and password supplied by the client is + acceptable for use in authentication. 
+ + Return ``AUTH_FAILED`` if the password is not accepted, + ``AUTH_SUCCESSFUL`` if the password is accepted and completes + the authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your + authentication is stateful, and this key is accepted for + authentication, but more authentication is required. (In this latter + case, `get_allowed_auths` will be called to report to the client what + options it has for continuing the authentication.) + + The default implementation always returns ``AUTH_FAILED``. + + :param str username: the username of the authenticating client. + :param str password: the password given by the client. + :return: + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if + it succeeds; ``AUTH_PARTIALLY_SUCCESSFUL`` if the password auth is + successful, but authentication must continue. + :rtype: int + """ + return AUTH_FAILED + + def check_auth_publickey(self, username, key): + """ + Determine if a given key supplied by the client is acceptable for use + in authentication. You should override this method in server mode to + check the username and key and decide if you would accept a signature + made using this key. + + Return ``AUTH_FAILED`` if the key is not accepted, + ``AUTH_SUCCESSFUL`` if the key is accepted and completes the + authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your + authentication is stateful, and this password is accepted for + authentication, but more authentication is required. (In this latter + case, `get_allowed_auths` will be called to report to the client what + options it has for continuing the authentication.) + + Note that you don't have to actually verify any key signtature here. + If you're willing to accept the key, Paramiko will do the work of + verifying the client's signature. + + The default implementation always returns ``AUTH_FAILED``. + + :param str username: the username of the authenticating client + :param .PKey key: the key object provided by the client + :return: + ``AUTH_FAILED`` if the client can't authenticate with this key; + ``AUTH_SUCCESSFUL`` if it can; ``AUTH_PARTIALLY_SUCCESSFUL`` if it + can authenticate with this key but must continue with + authentication + :rtype: int + """ + return AUTH_FAILED + + def check_auth_interactive(self, username, submethods): + """ + Begin an interactive authentication challenge, if supported. You + should override this method in server mode if you want to support the + ``"keyboard-interactive"`` auth type, which requires you to send a + series of questions for the client to answer. + + Return ``AUTH_FAILED`` if this auth method isn't supported. Otherwise, + you should return an `.InteractiveQuery` object containing the prompts + and instructions for the user. The response will be sent via a call + to `check_auth_interactive_response`. + + The default implementation always returns ``AUTH_FAILED``. + + :param str username: the username of the authenticating client + :param str submethods: + a comma-separated list of methods preferred by the client (usually + empty) + :return: + ``AUTH_FAILED`` if this auth method isn't supported; otherwise an + object containing queries for the user + :rtype: int or `.InteractiveQuery` + """ + return AUTH_FAILED + + def check_auth_interactive_response(self, responses): + """ + Continue or finish an interactive authentication challenge, if + supported. You should override this method in server mode if you want + to support the ``"keyboard-interactive"`` auth type. 
+ + Return ``AUTH_FAILED`` if the responses are not accepted, + ``AUTH_SUCCESSFUL`` if the responses are accepted and complete + the authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your + authentication is stateful, and this set of responses is accepted for + authentication, but more authentication is required. (In this latter + case, `get_allowed_auths` will be called to report to the client what + options it has for continuing the authentication.) + + If you wish to continue interactive authentication with more questions, + you may return an `.InteractiveQuery` object, which should cause the + client to respond with more answers, calling this method again. This + cycle can continue indefinitely. + + The default implementation always returns ``AUTH_FAILED``. + + :param responses: list of `str` responses from the client + :return: + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if + it succeeds; ``AUTH_PARTIALLY_SUCCESSFUL`` if the interactive auth + is successful, but authentication must continue; otherwise an + object containing queries for the user + :rtype: int or `.InteractiveQuery` + """ + return AUTH_FAILED + + def check_auth_gssapi_with_mic( + self, username, gss_authenticated=AUTH_FAILED, cc_file=None + ): + """ + Authenticate the given user to the server if he is a valid krb5 + principal. + + :param str username: The username of the authenticating client + :param int gss_authenticated: The result of the krb5 authentication + :param str cc_filename: The krb5 client credentials cache filename + :return: ``AUTH_FAILED`` if the user is not authenticated otherwise + ``AUTH_SUCCESSFUL`` + :rtype: int + :note: Kerberos credential delegation is not supported. + :see: `.ssh_gss` + :note: : We are just checking in L{AuthHandler} that the given user is + a valid krb5 principal! + We don't check if the krb5 principal is allowed to log in on + the server, because there is no way to do that in python. So + if you develop your own SSH server with paramiko for a certain + platform like Linux, you should call C{krb5_kuserok()} in + your local kerberos library to make sure that the + krb5_principal has an account on the server and is allowed to + log in as a user. + :see: http://www.unix.com/man-page/all/3/krb5_kuserok/ + """ + if gss_authenticated == AUTH_SUCCESSFUL: + return AUTH_SUCCESSFUL + return AUTH_FAILED + + def check_auth_gssapi_keyex( + self, username, gss_authenticated=AUTH_FAILED, cc_file=None + ): + """ + Authenticate the given user to the server if he is a valid krb5 + principal and GSS-API Key Exchange was performed. + If GSS-API Key Exchange was not performed, this authentication method + won't be available. + + :param str username: The username of the authenticating client + :param int gss_authenticated: The result of the krb5 authentication + :param str cc_filename: The krb5 client credentials cache filename + :return: ``AUTH_FAILED`` if the user is not authenticated otherwise + ``AUTH_SUCCESSFUL`` + :rtype: int + :note: Kerberos credential delegation is not supported. + :see: `.ssh_gss` `.kex_gss` + :note: : We are just checking in L{AuthHandler} that the given user is + a valid krb5 principal! + We don't check if the krb5 principal is allowed to log in on + the server, because there is no way to do that in python. 
So + if you develop your own SSH server with paramiko for a certain + platform like Linux, you should call C{krb5_kuserok()} in + your local kerberos library to make sure that the + krb5_principal has an account on the server and is allowed + to log in as a user. + :see: http://www.unix.com/man-page/all/3/krb5_kuserok/ + """ + if gss_authenticated == AUTH_SUCCESSFUL: + return AUTH_SUCCESSFUL + return AUTH_FAILED + + def enable_auth_gssapi(self): + """ + Overwrite this function in your SSH server to enable GSSAPI + authentication. + The default implementation always returns false. + + :returns bool: Whether GSSAPI authentication is enabled. + :see: `.ssh_gss` + """ + UseGSSAPI = False + return UseGSSAPI + + def check_port_forward_request(self, address, port): + """ + Handle a request for port forwarding. The client is asking that + connections to the given address and port be forwarded back across + this ssh connection. An address of ``"0.0.0.0"`` indicates a global + address (any address associated with this server) and a port of ``0`` + indicates that no specific port is requested (usually the OS will pick + a port). + + The default implementation always returns ``False``, rejecting the + port forwarding request. If the request is accepted, you should return + the port opened for listening. + + :param str address: the requested address + :param int port: the requested port + :return: + the port number (`int`) that was opened for listening, or ``False`` + to reject + """ + return False + + def cancel_port_forward_request(self, address, port): + """ + The client would like to cancel a previous port-forwarding request. + If the given address and port is being forwarded across this ssh + connection, the port should be closed. + + :param str address: the forwarded address + :param int port: the forwarded port + """ + pass + + def check_global_request(self, kind, msg): + """ + Handle a global request of the given ``kind``. This method is called + in server mode and client mode, whenever the remote host makes a global + request. If there are any arguments to the request, they will be in + ``msg``. + + There aren't any useful global requests defined, aside from port + forwarding, so usually this type of request is an extension to the + protocol. + + If the request was successful and you would like to return contextual + data to the remote host, return a tuple. Items in the tuple will be + sent back with the successful result. (Note that the items in the + tuple can only be strings, ints, or bools.) + + The default implementation always returns ``False``, indicating that it + does not support any global requests. + + .. note:: Port forwarding requests are handled separately, in + `check_port_forward_request`. + + :param str kind: the kind of global request being made. + :param .Message msg: any extra arguments to the request. + :return: + ``True`` or a `tuple` of data if the request was granted; ``False`` + otherwise. + """ + return False + + # ...Channel requests... + + def check_channel_pty_request( + self, channel, term, width, height, pixelwidth, pixelheight, modes + ): + """ + Determine if a pseudo-terminal of the given dimensions (usually + requested for shell access) can be provided on the given channel. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the pty request arrived on. + :param str term: type of terminal requested (for example, ``"vt100"``). + :param int width: width of screen in characters. 
+ :param int height: height of screen in characters. + :param int pixelwidth: + width of screen in pixels, if known (may be ``0`` if unknown). + :param int pixelheight: + height of screen in pixels, if known (may be ``0`` if unknown). + :return: + ``True`` if the pseudo-terminal has been allocated; ``False`` + otherwise. + """ + return False + + def check_channel_shell_request(self, channel): + """ + Determine if a shell will be provided to the client on the given + channel. If this method returns ``True``, the channel should be + connected to the stdin/stdout of a shell (or something that acts like + a shell). + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the request arrived on. + :return: + ``True`` if this channel is now hooked up to a shell; ``False`` if + a shell can't or won't be provided. + """ + return False + + def check_channel_exec_request(self, channel, command): + """ + Determine if a shell command will be executed for the client. If this + method returns ``True``, the channel should be connected to the stdin, + stdout, and stderr of the shell command. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the request arrived on. + :param str command: the command to execute. + :return: + ``True`` if this channel is now hooked up to the stdin, stdout, and + stderr of the executing command; ``False`` if the command will not + be executed. + + .. versionadded:: 1.1 + """ + return False + + def check_channel_subsystem_request(self, channel, name): + """ + Determine if a requested subsystem will be provided to the client on + the given channel. If this method returns ``True``, all future I/O + through this channel will be assumed to be connected to the requested + subsystem. An example of a subsystem is ``sftp``. + + The default implementation checks for a subsystem handler assigned via + `.Transport.set_subsystem_handler`. + If one has been set, the handler is invoked and this method returns + ``True``. Otherwise it returns ``False``. + + .. note:: Because the default implementation uses the `.Transport` to + identify valid subsystems, you probably won't need to override this + method. + + :param .Channel channel: the `.Channel` the pty request arrived on. + :param str name: name of the requested subsystem. + :return: + ``True`` if this channel is now hooked up to the requested + subsystem; ``False`` if that subsystem can't or won't be provided. + """ + transport = channel.get_transport() + handler_class, args, kwargs = transport._get_subsystem_handler(name) + if handler_class is None: + return False + handler = handler_class(channel, name, self, *args, **kwargs) + handler.start() + return True + + def check_channel_window_change_request( + self, channel, width, height, pixelwidth, pixelheight + ): + """ + Determine if the pseudo-terminal on the given channel can be resized. + This only makes sense if a pty was previously allocated on it. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the pty request arrived on. + :param int width: width of screen in characters. + :param int height: height of screen in characters. + :param int pixelwidth: + width of screen in pixels, if known (may be ``0`` if unknown). + :param int pixelheight: + height of screen in pixels, if known (may be ``0`` if unknown). + :return: ``True`` if the terminal was resized; ``False`` if not. 
+ """ + return False + + def check_channel_x11_request( + self, + channel, + single_connection, + auth_protocol, + auth_cookie, + screen_number, + ): + """ + Determine if the client will be provided with an X11 session. If this + method returns ``True``, X11 applications should be routed through new + SSH channels, using `.Transport.open_x11_channel`. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the X11 request arrived on + :param bool single_connection: + ``True`` if only a single X11 channel should be opened, else + ``False``. + :param str auth_protocol: the protocol used for X11 authentication + :param str auth_cookie: the cookie used to authenticate to X11 + :param int screen_number: the number of the X11 screen to connect to + :return: ``True`` if the X11 session was opened; ``False`` if not + """ + return False + + def check_channel_forward_agent_request(self, channel): + """ + Determine if the client will be provided with an forward agent session. + If this method returns ``True``, the server will allow SSH Agent + forwarding. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the request arrived on + :return: ``True`` if the AgentForward was loaded; ``False`` if not + + If ``True`` is returned, the server should create an + :class:`AgentServerProxy` to access the agent. + """ + return False + + def check_channel_direct_tcpip_request(self, chanid, origin, destination): + """ + Determine if a local port forwarding channel will be granted, and + return ``OPEN_SUCCEEDED`` or an error code. This method is + called in server mode when the client requests a channel, after + authentication is complete. + + The ``chanid`` parameter is a small number that uniquely identifies the + channel within a `.Transport`. A `.Channel` object is not created + unless this method returns ``OPEN_SUCCEEDED`` -- once a + `.Channel` object is created, you can call `.Channel.get_id` to + retrieve the channel ID. + + The origin and destination parameters are (ip_address, port) tuples + that correspond to both ends of the TCP connection in the forwarding + tunnel. + + The return value should either be ``OPEN_SUCCEEDED`` (or + ``0``) to allow the channel request, or one of the following error + codes to reject it: + + - ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED`` + - ``OPEN_FAILED_CONNECT_FAILED`` + - ``OPEN_FAILED_UNKNOWN_CHANNEL_TYPE`` + - ``OPEN_FAILED_RESOURCE_SHORTAGE`` + + The default implementation always returns + ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``. + + :param int chanid: ID of the channel + :param tuple origin: + 2-tuple containing the IP address and port of the originator + (client side) + :param tuple destination: + 2-tuple containing the IP address and port of the destination + (server side) + :return: an `int` success or failure code (listed above) + """ + return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED + + def check_channel_env_request(self, channel, name, value): + """ + Check whether a given environment variable can be specified for the + given channel. This method should return ``True`` if the server + is willing to set the specified environment variable. Note that + some environment variables (e.g., PATH) can be exceedingly + dangerous, so blindly allowing the client to set the environment + is almost certainly not a good idea. + + The default implementation always returns ``False``. 
+ + :param channel: the `.Channel` the env request arrived on + :param str name: name + :param str value: Channel value + :returns: A boolean + """ + return False + + def get_banner(self): + """ + A pre-login banner to display to the user. The message may span + multiple lines separated by crlf pairs. The language should be in + rfc3066 style, for example: en-US + + The default implementation always returns ``(None, None)``. + + :returns: A tuple containing the banner and language code. + + .. versionadded:: 2.3 + """ + return (None, None) + + +class InteractiveQuery: + """ + A query (set of prompts) for a user during interactive authentication. + """ + + def __init__(self, name="", instructions="", *prompts): + """ + Create a new interactive query to send to the client. The name and + instructions are optional, but are generally displayed to the end + user. A list of prompts may be included, or they may be added via + the `add_prompt` method. + + :param str name: name of this query + :param str instructions: + user instructions (usually short) about this query + :param str prompts: one or more authentication prompts + """ + self.name = name + self.instructions = instructions + self.prompts = [] + for x in prompts: + if isinstance(x, str): + self.add_prompt(x) + else: + self.add_prompt(x[0], x[1]) + + def add_prompt(self, prompt, echo=True): + """ + Add a prompt to this query. The prompt should be a (reasonably short) + string. Multiple prompts can be added to the same query. + + :param str prompt: the user prompt + :param bool echo: + ``True`` (default) if the user's response should be echoed; + ``False`` if not (for a password or similar) + """ + self.prompts.append((prompt, echo)) + + +class SubsystemHandler(threading.Thread): + """ + Handler for a subsystem in server mode. If you create a subclass of this + class and pass it to `.Transport.set_subsystem_handler`, an object of this + class will be created for each request for this subsystem. Each new object + will be executed within its own new thread by calling `start_subsystem`. + When that method completes, the channel is closed. + + For example, if you made a subclass ``MP3Handler`` and registered it as the + handler for subsystem ``"mp3"``, then whenever a client has successfully + authenticated and requests subsystem ``"mp3"``, an object of class + ``MP3Handler`` will be created, and `start_subsystem` will be called on + it from a new thread. + """ + + def __init__(self, channel, name, server): + """ + Create a new handler for a channel. This is used by `.ServerInterface` + to start up a new handler when a channel requests this subsystem. You + don't need to override this method, but if you do, be sure to pass the + ``channel`` and ``name`` parameters through to the original + ``__init__`` method here. + + :param .Channel channel: the channel associated with this + subsystem request. + :param str name: name of the requested subsystem. + :param .ServerInterface server: + the server object for the session that started this subsystem + """ + threading.Thread.__init__(self, target=self._run) + self.__channel = channel + self.__transport = channel.get_transport() + self.__name = name + self.__server = server + + def get_server(self): + """ + Return the `.ServerInterface` object associated with this channel and + subsystem. 
+ """ + return self.__server + + def _run(self): + try: + self.__transport._log( + DEBUG, "Starting handler for subsystem {}".format(self.__name) + ) + self.start_subsystem(self.__name, self.__transport, self.__channel) + except Exception as e: + self.__transport._log( + ERROR, + 'Exception in subsystem handler for "{}": {}'.format( + self.__name, e + ), + ) + self.__transport._log(ERROR, util.tb_strings()) + try: + self.finish_subsystem() + except: + pass + + def start_subsystem(self, name, transport, channel): + """ + Process an ssh subsystem in server mode. This method is called on a + new object (and in a new thread) for each subsystem request. It is + assumed that all subsystem logic will take place here, and when the + subsystem is finished, this method will return. After this method + returns, the channel is closed. + + The combination of ``transport`` and ``channel`` are unique; this + handler corresponds to exactly one `.Channel` on one `.Transport`. + + .. note:: + It is the responsibility of this method to exit if the underlying + `.Transport` is closed. This can be done by checking + `.Transport.is_active` or noticing an EOF on the `.Channel`. If + this method loops forever without checking for this case, your + Python interpreter may refuse to exit because this thread will + still be running. + + :param str name: name of the requested subsystem. + :param .Transport transport: the server-mode `.Transport`. + :param .Channel channel: the channel associated with this subsystem + request. + """ + pass + + def finish_subsystem(self): + """ + Perform any cleanup at the end of a subsystem. The default + implementation just closes the channel. + + .. versionadded:: 1.1 + """ + self.__channel.close() diff --git a/.venv/lib/python3.9/site-packages/paramiko/sftp.py b/.venv/lib/python3.9/site-packages/paramiko/sftp.py new file mode 100644 index 0000000..b3528d4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/sftp.py @@ -0,0 +1,224 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +import select +import socket +import struct + +from paramiko import util +from paramiko.common import DEBUG, byte_chr, byte_ord +from paramiko.message import Message + + +( + CMD_INIT, + CMD_VERSION, + CMD_OPEN, + CMD_CLOSE, + CMD_READ, + CMD_WRITE, + CMD_LSTAT, + CMD_FSTAT, + CMD_SETSTAT, + CMD_FSETSTAT, + CMD_OPENDIR, + CMD_READDIR, + CMD_REMOVE, + CMD_MKDIR, + CMD_RMDIR, + CMD_REALPATH, + CMD_STAT, + CMD_RENAME, + CMD_READLINK, + CMD_SYMLINK, +) = range(1, 21) +(CMD_STATUS, CMD_HANDLE, CMD_DATA, CMD_NAME, CMD_ATTRS) = range(101, 106) +(CMD_EXTENDED, CMD_EXTENDED_REPLY) = range(200, 202) + +SFTP_OK = 0 +( + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, + SFTP_FAILURE, + SFTP_BAD_MESSAGE, + SFTP_NO_CONNECTION, + SFTP_CONNECTION_LOST, + SFTP_OP_UNSUPPORTED, +) = range(1, 9) + +SFTP_DESC = [ + "Success", + "End of file", + "No such file", + "Permission denied", + "Failure", + "Bad message", + "No connection", + "Connection lost", + "Operation unsupported", +] + +SFTP_FLAG_READ = 0x1 +SFTP_FLAG_WRITE = 0x2 +SFTP_FLAG_APPEND = 0x4 +SFTP_FLAG_CREATE = 0x8 +SFTP_FLAG_TRUNC = 0x10 +SFTP_FLAG_EXCL = 0x20 + +_VERSION = 3 + + +# for debugging +CMD_NAMES = { + CMD_INIT: "init", + CMD_VERSION: "version", + CMD_OPEN: "open", + CMD_CLOSE: "close", + CMD_READ: "read", + CMD_WRITE: "write", + CMD_LSTAT: "lstat", + CMD_FSTAT: "fstat", + CMD_SETSTAT: "setstat", + CMD_FSETSTAT: "fsetstat", + CMD_OPENDIR: "opendir", + CMD_READDIR: "readdir", + CMD_REMOVE: "remove", + CMD_MKDIR: "mkdir", + CMD_RMDIR: "rmdir", + CMD_REALPATH: "realpath", + CMD_STAT: "stat", + CMD_RENAME: "rename", + CMD_READLINK: "readlink", + CMD_SYMLINK: "symlink", + CMD_STATUS: "status", + CMD_HANDLE: "handle", + CMD_DATA: "data", + CMD_NAME: "name", + CMD_ATTRS: "attrs", + CMD_EXTENDED: "extended", + CMD_EXTENDED_REPLY: "extended_reply", +} + + +# TODO: rewrite SFTP file/server modules' overly-flexible "make a request with +# xyz components" so we don't need this very silly method of signaling whether +# a given Python integer should be 32- or 64-bit. +# NOTE: this only became an issue when dropping Python 2 support; prior to +# doing so, we had to support actual-longs, which served as that signal. This +# is simply recreating that structure in a more tightly scoped fashion. +class int64(int): + pass + + +class SFTPError(Exception): + pass + + +class BaseSFTP: + def __init__(self): + self.logger = util.get_logger("paramiko.sftp") + self.sock = None + self.ultra_debug = False + + # ...internals... + + def _send_version(self): + m = Message() + m.add_int(_VERSION) + self._send_packet(CMD_INIT, m) + t, data = self._read_packet() + if t != CMD_VERSION: + raise SFTPError("Incompatible sftp protocol") + version = struct.unpack(">I", data[:4])[0] + # if version != _VERSION: + # raise SFTPError('Incompatible sftp protocol') + return version + + def _send_server_version(self): + # winscp will freak out if the server sends version info before the + # client finishes sending INIT. 
+ t, data = self._read_packet() + if t != CMD_INIT: + raise SFTPError("Incompatible sftp protocol") + version = struct.unpack(">I", data[:4])[0] + # advertise that we support "check-file" + extension_pairs = ["check-file", "md5,sha1"] + msg = Message() + msg.add_int(_VERSION) + msg.add(*extension_pairs) + self._send_packet(CMD_VERSION, msg) + return version + + def _log(self, level, msg, *args): + self.logger.log(level, msg, *args) + + def _write_all(self, out): + while len(out) > 0: + n = self.sock.send(out) + if n <= 0: + raise EOFError() + if n == len(out): + return + out = out[n:] + return + + def _read_all(self, n): + out = bytes() + while n > 0: + if isinstance(self.sock, socket.socket): + # sometimes sftp is used directly over a socket instead of + # through a paramiko channel. in this case, check periodically + # if the socket is closed. (for some reason, recv() won't ever + # return or raise an exception, but calling select on a closed + # socket will.) + while True: + read, write, err = select.select([self.sock], [], [], 0.1) + if len(read) > 0: + x = self.sock.recv(n) + break + else: + x = self.sock.recv(n) + + if len(x) == 0: + raise EOFError() + out += x + n -= len(x) + return out + + def _send_packet(self, t, packet): + packet = packet.asbytes() + out = struct.pack(">I", len(packet) + 1) + byte_chr(t) + packet + if self.ultra_debug: + self._log(DEBUG, util.format_binary(out, "OUT: ")) + self._write_all(out) + + def _read_packet(self): + x = self._read_all(4) + # most sftp servers won't accept packets larger than about 32k, so + # anything with the high byte set (> 16MB) is just garbage. + if byte_ord(x[0]): + raise SFTPError("Garbage packet received") + size = struct.unpack(">I", x)[0] + data = self._read_all(size) + if self.ultra_debug: + self._log(DEBUG, util.format_binary(data, "IN: ")) + if size > 0: + t = byte_ord(data[0]) + return t, data[1:] + return 0, bytes() diff --git a/.venv/lib/python3.9/site-packages/paramiko/sftp_attr.py b/.venv/lib/python3.9/site-packages/paramiko/sftp_attr.py new file mode 100644 index 0000000..18ffbf8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/sftp_attr.py @@ -0,0 +1,239 @@ +# Copyright (C) 2003-2006 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import stat +import time +from paramiko.common import x80000000, o700, o70, xffffffff + + +class SFTPAttributes: + """ + Representation of the attributes of a file (or proxied file) for SFTP in + client or server mode. 
It attempts to mirror the object returned by + `os.stat` as closely as possible, so it may have the following fields, + with the same meanings as those returned by an `os.stat` object: + + - ``st_size`` + - ``st_uid`` + - ``st_gid`` + - ``st_mode`` + - ``st_atime`` + - ``st_mtime`` + + Because SFTP allows flags to have other arbitrary named attributes, these + are stored in a dict named ``attr``. Occasionally, the filename is also + stored, in ``filename``. + """ + + FLAG_SIZE = 1 + FLAG_UIDGID = 2 + FLAG_PERMISSIONS = 4 + FLAG_AMTIME = 8 + FLAG_EXTENDED = x80000000 + + def __init__(self): + """ + Create a new (empty) SFTPAttributes object. All fields will be empty. + """ + self._flags = 0 + self.st_size = None + self.st_uid = None + self.st_gid = None + self.st_mode = None + self.st_atime = None + self.st_mtime = None + self.attr = {} + + @classmethod + def from_stat(cls, obj, filename=None): + """ + Create an `.SFTPAttributes` object from an existing ``stat`` object (an + object returned by `os.stat`). + + :param object obj: an object returned by `os.stat` (or equivalent). + :param str filename: the filename associated with this file. + :return: new `.SFTPAttributes` object with the same attribute fields. + """ + attr = cls() + attr.st_size = obj.st_size + attr.st_uid = obj.st_uid + attr.st_gid = obj.st_gid + attr.st_mode = obj.st_mode + attr.st_atime = obj.st_atime + attr.st_mtime = obj.st_mtime + if filename is not None: + attr.filename = filename + return attr + + def __repr__(self): + return "".format(self._debug_str()) + + # ...internals... + @classmethod + def _from_msg(cls, msg, filename=None, longname=None): + attr = cls() + attr._unpack(msg) + if filename is not None: + attr.filename = filename + if longname is not None: + attr.longname = longname + return attr + + def _unpack(self, msg): + self._flags = msg.get_int() + if self._flags & self.FLAG_SIZE: + self.st_size = msg.get_int64() + if self._flags & self.FLAG_UIDGID: + self.st_uid = msg.get_int() + self.st_gid = msg.get_int() + if self._flags & self.FLAG_PERMISSIONS: + self.st_mode = msg.get_int() + if self._flags & self.FLAG_AMTIME: + self.st_atime = msg.get_int() + self.st_mtime = msg.get_int() + if self._flags & self.FLAG_EXTENDED: + count = msg.get_int() + for i in range(count): + self.attr[msg.get_string()] = msg.get_string() + + def _pack(self, msg): + self._flags = 0 + if self.st_size is not None: + self._flags |= self.FLAG_SIZE + if (self.st_uid is not None) and (self.st_gid is not None): + self._flags |= self.FLAG_UIDGID + if self.st_mode is not None: + self._flags |= self.FLAG_PERMISSIONS + if (self.st_atime is not None) and (self.st_mtime is not None): + self._flags |= self.FLAG_AMTIME + if len(self.attr) > 0: + self._flags |= self.FLAG_EXTENDED + msg.add_int(self._flags) + if self._flags & self.FLAG_SIZE: + msg.add_int64(self.st_size) + if self._flags & self.FLAG_UIDGID: + msg.add_int(self.st_uid) + msg.add_int(self.st_gid) + if self._flags & self.FLAG_PERMISSIONS: + msg.add_int(self.st_mode) + if self._flags & self.FLAG_AMTIME: + # throw away any fractional seconds + msg.add_int(int(self.st_atime)) + msg.add_int(int(self.st_mtime)) + if self._flags & self.FLAG_EXTENDED: + msg.add_int(len(self.attr)) + for key, val in self.attr.items(): + msg.add_string(key) + msg.add_string(val) + return + + def _debug_str(self): + out = "[ " + if self.st_size is not None: + out += "size={} ".format(self.st_size) + if (self.st_uid is not None) and (self.st_gid is not None): + out += "uid={} gid={} ".format(self.st_uid, 
self.st_gid) + if self.st_mode is not None: + out += "mode=" + oct(self.st_mode) + " " + if (self.st_atime is not None) and (self.st_mtime is not None): + out += "atime={} mtime={} ".format(self.st_atime, self.st_mtime) + for k, v in self.attr.items(): + out += '"{}"={!r} '.format(str(k), v) + out += "]" + return out + + @staticmethod + def _rwx(n, suid, sticky=False): + if suid: + suid = 2 + out = "-r"[n >> 2] + "-w"[(n >> 1) & 1] + if sticky: + out += "-xTt"[suid + (n & 1)] + else: + out += "-xSs"[suid + (n & 1)] + return out + + def __str__(self): + """create a unix-style long description of the file (like ls -l)""" + if self.st_mode is not None: + kind = stat.S_IFMT(self.st_mode) + if kind == stat.S_IFIFO: + ks = "p" + elif kind == stat.S_IFCHR: + ks = "c" + elif kind == stat.S_IFDIR: + ks = "d" + elif kind == stat.S_IFBLK: + ks = "b" + elif kind == stat.S_IFREG: + ks = "-" + elif kind == stat.S_IFLNK: + ks = "l" + elif kind == stat.S_IFSOCK: + ks = "s" + else: + ks = "?" + ks += self._rwx( + (self.st_mode & o700) >> 6, self.st_mode & stat.S_ISUID + ) + ks += self._rwx( + (self.st_mode & o70) >> 3, self.st_mode & stat.S_ISGID + ) + ks += self._rwx( + self.st_mode & 7, self.st_mode & stat.S_ISVTX, True + ) + else: + ks = "?---------" + # compute display date + if (self.st_mtime is None) or (self.st_mtime == xffffffff): + # shouldn't really happen + datestr = "(unknown date)" + else: + time_tuple = time.localtime(self.st_mtime) + if abs(time.time() - self.st_mtime) > 15_552_000: + # (15,552,000s = 6 months) + datestr = time.strftime("%d %b %Y", time_tuple) + else: + datestr = time.strftime("%d %b %H:%M", time_tuple) + filename = getattr(self, "filename", "?") + + # not all servers support uid/gid + uid = self.st_uid + gid = self.st_gid + size = self.st_size + if uid is None: + uid = 0 + if gid is None: + gid = 0 + if size is None: + size = 0 + + # TODO: not sure this actually worked as expected beforehand, leaving + # it untouched for the time being, re: .format() upgrade, until someone + # has time to doublecheck + return "%s 1 %-8d %-8d %8d %-12s %s" % ( + ks, + uid, + gid, + size, + datestr, + filename, + ) + + def asbytes(self): + return str(self).encode() diff --git a/.venv/lib/python3.9/site-packages/paramiko/sftp_client.py b/.venv/lib/python3.9/site-packages/paramiko/sftp_client.py new file mode 100644 index 0000000..066cd83 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/sftp_client.py @@ -0,0 +1,965 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of Paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ + +from binascii import hexlify +import errno +import os +import stat +import threading +import time +import weakref +from paramiko import util +from paramiko.channel import Channel +from paramiko.message import Message +from paramiko.common import INFO, DEBUG, o777 +from paramiko.sftp import ( + BaseSFTP, + CMD_OPENDIR, + CMD_HANDLE, + SFTPError, + CMD_READDIR, + CMD_NAME, + CMD_CLOSE, + SFTP_FLAG_READ, + SFTP_FLAG_WRITE, + SFTP_FLAG_CREATE, + SFTP_FLAG_TRUNC, + SFTP_FLAG_APPEND, + SFTP_FLAG_EXCL, + CMD_OPEN, + CMD_REMOVE, + CMD_RENAME, + CMD_MKDIR, + CMD_RMDIR, + CMD_STAT, + CMD_ATTRS, + CMD_LSTAT, + CMD_SYMLINK, + CMD_SETSTAT, + CMD_READLINK, + CMD_REALPATH, + CMD_STATUS, + CMD_EXTENDED, + SFTP_OK, + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, + int64, +) + +from paramiko.sftp_attr import SFTPAttributes +from paramiko.ssh_exception import SSHException +from paramiko.sftp_file import SFTPFile +from paramiko.util import ClosingContextManager, b, u + + +def _to_unicode(s): + """ + decode a string as ascii or utf8 if possible (as required by the sftp + protocol). if neither works, just return a byte string because the server + probably doesn't know the filename's encoding. + """ + try: + return s.encode("ascii") + except (UnicodeError, AttributeError): + try: + return s.decode("utf-8") + except UnicodeError: + return s + + +b_slash = b"/" + + +class SFTPClient(BaseSFTP, ClosingContextManager): + """ + SFTP client object. + + Used to open an SFTP session across an open SSH `.Transport` and perform + remote file operations. + + Instances of this class may be used as context managers. + """ + + def __init__(self, sock): + """ + Create an SFTP client from an existing `.Channel`. The channel + should already have requested the ``"sftp"`` subsystem. + + An alternate way to create an SFTP client context is by using + `from_transport`. + + :param .Channel sock: an open `.Channel` using the ``"sftp"`` subsystem + + :raises: + `.SSHException` -- if there's an exception while negotiating sftp + """ + BaseSFTP.__init__(self) + self.sock = sock + self.ultra_debug = False + self.request_number = 1 + # lock for request_number + self._lock = threading.Lock() + self._cwd = None + # request # -> SFTPFile + self._expecting = weakref.WeakValueDictionary() + if type(sock) is Channel: + # override default logger + transport = self.sock.get_transport() + self.logger = util.get_logger( + transport.get_log_channel() + ".sftp" + ) + self.ultra_debug = transport.get_hexdump() + try: + server_version = self._send_version() + except EOFError: + raise SSHException("EOF during negotiation") + self._log( + INFO, + "Opened sftp connection (server version {})".format( + server_version + ), + ) + + @classmethod + def from_transport(cls, t, window_size=None, max_packet_size=None): + """ + Create an SFTP client channel from an open `.Transport`. + + Setting the window and packet sizes might affect the transfer speed. + The default settings in the `.Transport` class are the same as in + OpenSSH and should work adequately for both files transfers and + interactive sessions. + + :param .Transport t: an open `.Transport` which is already + authenticated + :param int window_size: + optional window size for the `.SFTPClient` session. + :param int max_packet_size: + optional max packet size for the `.SFTPClient` session.. + + :return: + a new `.SFTPClient` object, referring to an sftp session (channel) + across the transport + + .. versionchanged:: 1.15 + Added the ``window_size`` and ``max_packet_size`` arguments. 
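+
+        Illustrative sketch (not part of the upstream docstring); ``t`` is
+        assumed to be an already-authenticated `.Transport`::
+
+            sftp = SFTPClient.from_transport(t)
+            print(sftp.listdir("."))
+            sftp.close()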
+ """ + chan = t.open_session( + window_size=window_size, max_packet_size=max_packet_size + ) + if chan is None: + return None + chan.invoke_subsystem("sftp") + return cls(chan) + + def _log(self, level, msg, *args): + if isinstance(msg, list): + for m in msg: + self._log(level, m, *args) + else: + # NOTE: these bits MUST continue using %-style format junk because + # logging.Logger.log() explicitly requires it. Grump. + # escape '%' in msg (they could come from file or directory names) + # before logging + msg = msg.replace("%", "%%") + super()._log( + level, + "[chan %s] " + msg, + *([self.sock.get_name()] + list(args)) + ) + + def close(self): + """ + Close the SFTP session and its underlying channel. + + .. versionadded:: 1.4 + """ + self._log(INFO, "sftp session closed.") + self.sock.close() + + def get_channel(self): + """ + Return the underlying `.Channel` object for this SFTP session. This + might be useful for doing things like setting a timeout on the channel. + + .. versionadded:: 1.7.1 + """ + return self.sock + + def listdir(self, path="."): + """ + Return a list containing the names of the entries in the given + ``path``. + + The list is in arbitrary order. It does not include the special + entries ``'.'`` and ``'..'`` even if they are present in the folder. + This method is meant to mirror ``os.listdir`` as closely as possible. + For a list of full `.SFTPAttributes` objects, see `listdir_attr`. + + :param str path: path to list (defaults to ``'.'``) + """ + return [f.filename for f in self.listdir_attr(path)] + + def listdir_attr(self, path="."): + """ + Return a list containing `.SFTPAttributes` objects corresponding to + files in the given ``path``. The list is in arbitrary order. It does + not include the special entries ``'.'`` and ``'..'`` even if they are + present in the folder. + + The returned `.SFTPAttributes` objects will each have an additional + field: ``longname``, which may contain a formatted string of the file's + attributes, in unix format. The content of this string will probably + depend on the SFTP server implementation. + + :param str path: path to list (defaults to ``'.'``) + :return: list of `.SFTPAttributes` objects + + .. versionadded:: 1.2 + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "listdir({!r})".format(path)) + t, msg = self._request(CMD_OPENDIR, path) + if t != CMD_HANDLE: + raise SFTPError("Expected handle") + handle = msg.get_binary() + filelist = [] + while True: + try: + t, msg = self._request(CMD_READDIR, handle) + except EOFError: + # done with handle + break + if t != CMD_NAME: + raise SFTPError("Expected name response") + count = msg.get_int() + for i in range(count): + filename = msg.get_text() + longname = msg.get_text() + attr = SFTPAttributes._from_msg(msg, filename, longname) + if (filename != ".") and (filename != ".."): + filelist.append(attr) + self._request(CMD_CLOSE, handle) + return filelist + + def listdir_iter(self, path=".", read_aheads=50): + """ + Generator version of `.listdir_attr`. + + See the API docs for `.listdir_attr` for overall details. + + This function adds one more kwarg on top of `.listdir_attr`: + ``read_aheads``, an integer controlling how many + ``SSH_FXP_READDIR`` requests are made to the server. The default of 50 + should suffice for most file listings as each request/response cycle + may contain multiple files (dependent on server implementation.) + + .. 
versionadded:: 1.15 + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "listdir({!r})".format(path)) + t, msg = self._request(CMD_OPENDIR, path) + + if t != CMD_HANDLE: + raise SFTPError("Expected handle") + + handle = msg.get_string() + + nums = list() + while True: + try: + # Send out a bunch of readdir requests so that we can read the + # responses later on Section 6.7 of the SSH file transfer RFC + # explains this + # http://filezilla-project.org/specs/draft-ietf-secsh-filexfer-02.txt + for i in range(read_aheads): + num = self._async_request(type(None), CMD_READDIR, handle) + nums.append(num) + + # For each of our sent requests + # Read and parse the corresponding packets + # If we're at the end of our queued requests, then fire off + # some more requests + # Exit the loop when we've reached the end of the directory + # handle + for num in nums: + t, pkt_data = self._read_packet() + msg = Message(pkt_data) + new_num = msg.get_int() + if num == new_num: + if t == CMD_STATUS: + self._convert_status(msg) + count = msg.get_int() + for i in range(count): + filename = msg.get_text() + longname = msg.get_text() + attr = SFTPAttributes._from_msg( + msg, filename, longname + ) + if (filename != ".") and (filename != ".."): + yield attr + + # If we've hit the end of our queued requests, reset nums. + nums = list() + + except EOFError: + self._request(CMD_CLOSE, handle) + return + + def open(self, filename, mode="r", bufsize=-1): + """ + Open a file on the remote server. The arguments are the same as for + Python's built-in `python:file` (aka `python:open`). A file-like + object is returned, which closely mimics the behavior of a normal + Python file object, including the ability to be used as a context + manager. + + The mode indicates how the file is to be opened: ``'r'`` for reading, + ``'w'`` for writing (truncating an existing file), ``'a'`` for + appending, ``'r+'`` for reading/writing, ``'w+'`` for reading/writing + (truncating an existing file), ``'a+'`` for reading/appending. The + Python ``'b'`` flag is ignored, since SSH treats all files as binary. + The ``'U'`` flag is supported in a compatible way. + + Since 1.5.2, an ``'x'`` flag indicates that the operation should only + succeed if the file was created and did not previously exist. This has + no direct mapping to Python's file flags, but is commonly known as the + ``O_EXCL`` flag in posix. + + The file will be buffered in standard Python style by default, but + can be altered with the ``bufsize`` parameter. ``<=0`` turns off + buffering, ``1`` uses line buffering, and any number greater than 1 + (``>1``) uses that specific buffer size. + + :param str filename: name of the file to open + :param str mode: mode (Python-style) to open in + :param int bufsize: desired buffering (default: ``-1``) + :return: an `.SFTPFile` object representing the open file + + :raises: ``IOError`` -- if the file could not be opened. 
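+
+        Illustrative sketch (not part of the upstream docstring); the remote
+        path is made up and ``sftp`` is assumed to be an open `.SFTPClient`::
+
+            with sftp.open("/tmp/example.txt", "w") as f:
+                f.write("hello from paramiko")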
+ """ + filename = self._adjust_cwd(filename) + self._log(DEBUG, "open({!r}, {!r})".format(filename, mode)) + imode = 0 + if ("r" in mode) or ("+" in mode): + imode |= SFTP_FLAG_READ + if ("w" in mode) or ("+" in mode) or ("a" in mode): + imode |= SFTP_FLAG_WRITE + if "w" in mode: + imode |= SFTP_FLAG_CREATE | SFTP_FLAG_TRUNC + if "a" in mode: + imode |= SFTP_FLAG_CREATE | SFTP_FLAG_APPEND + if "x" in mode: + imode |= SFTP_FLAG_CREATE | SFTP_FLAG_EXCL + attrblock = SFTPAttributes() + t, msg = self._request(CMD_OPEN, filename, imode, attrblock) + if t != CMD_HANDLE: + raise SFTPError("Expected handle") + handle = msg.get_binary() + self._log( + DEBUG, + "open({!r}, {!r}) -> {}".format( + filename, mode, u(hexlify(handle)) + ), + ) + return SFTPFile(self, handle, mode, bufsize) + + # Python continues to vacillate about "open" vs "file"... + file = open + + def remove(self, path): + """ + Remove the file at the given path. This only works on files; for + removing folders (directories), use `rmdir`. + + :param str path: path (absolute or relative) of the file to remove + + :raises: ``IOError`` -- if the path refers to a folder (directory) + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "remove({!r})".format(path)) + self._request(CMD_REMOVE, path) + + unlink = remove + + def rename(self, oldpath, newpath): + """ + Rename a file or folder from ``oldpath`` to ``newpath``. + + .. note:: + This method implements 'standard' SFTP ``RENAME`` behavior; those + seeking the OpenSSH "POSIX rename" extension behavior should use + `posix_rename`. + + :param str oldpath: + existing name of the file or folder + :param str newpath: + new name for the file or folder, must not exist already + + :raises: + ``IOError`` -- if ``newpath`` is a folder, or something else goes + wrong + """ + oldpath = self._adjust_cwd(oldpath) + newpath = self._adjust_cwd(newpath) + self._log(DEBUG, "rename({!r}, {!r})".format(oldpath, newpath)) + self._request(CMD_RENAME, oldpath, newpath) + + def posix_rename(self, oldpath, newpath): + """ + Rename a file or folder from ``oldpath`` to ``newpath``, following + posix conventions. + + :param str oldpath: existing name of the file or folder + :param str newpath: new name for the file or folder, will be + overwritten if it already exists + + :raises: + ``IOError`` -- if ``newpath`` is a folder, posix-rename is not + supported by the server or something else goes wrong + + :versionadded: 2.2 + """ + oldpath = self._adjust_cwd(oldpath) + newpath = self._adjust_cwd(newpath) + self._log(DEBUG, "posix_rename({!r}, {!r})".format(oldpath, newpath)) + self._request( + CMD_EXTENDED, "posix-rename@openssh.com", oldpath, newpath + ) + + def mkdir(self, path, mode=o777): + """ + Create a folder (directory) named ``path`` with numeric mode ``mode``. + The default mode is 0777 (octal). On some systems, mode is ignored. + Where it is used, the current umask value is first masked out. + + :param str path: name of the folder to create + :param int mode: permissions (posix-style) for the newly-created folder + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "mkdir({!r}, {!r})".format(path, mode)) + attr = SFTPAttributes() + attr.st_mode = mode + self._request(CMD_MKDIR, path, attr) + + def rmdir(self, path): + """ + Remove the folder named ``path``. 
+ + :param str path: name of the folder to remove + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "rmdir({!r})".format(path)) + self._request(CMD_RMDIR, path) + + def stat(self, path): + """ + Retrieve information about a file on the remote system. The return + value is an object whose attributes correspond to the attributes of + Python's ``stat`` structure as returned by ``os.stat``, except that it + contains fewer fields. An SFTP server may return as much or as little + info as it wants, so the results may vary from server to server. + + Unlike a Python `python:stat` object, the result may not be accessed as + a tuple. This is mostly due to the author's slack factor. + + The fields supported are: ``st_mode``, ``st_size``, ``st_uid``, + ``st_gid``, ``st_atime``, and ``st_mtime``. + + :param str path: the filename to stat + :return: + an `.SFTPAttributes` object containing attributes about the given + file + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "stat({!r})".format(path)) + t, msg = self._request(CMD_STAT, path) + if t != CMD_ATTRS: + raise SFTPError("Expected attributes") + return SFTPAttributes._from_msg(msg) + + def lstat(self, path): + """ + Retrieve information about a file on the remote system, without + following symbolic links (shortcuts). This otherwise behaves exactly + the same as `stat`. + + :param str path: the filename to stat + :return: + an `.SFTPAttributes` object containing attributes about the given + file + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "lstat({!r})".format(path)) + t, msg = self._request(CMD_LSTAT, path) + if t != CMD_ATTRS: + raise SFTPError("Expected attributes") + return SFTPAttributes._from_msg(msg) + + def symlink(self, source, dest): + """ + Create a symbolic link to the ``source`` path at ``destination``. + + :param str source: path of the original file + :param str dest: path of the newly created symlink + """ + dest = self._adjust_cwd(dest) + self._log(DEBUG, "symlink({!r}, {!r})".format(source, dest)) + source = b(source) + self._request(CMD_SYMLINK, source, dest) + + def chmod(self, path, mode): + """ + Change the mode (permissions) of a file. The permissions are + unix-style and identical to those used by Python's `os.chmod` + function. + + :param str path: path of the file to change the permissions of + :param int mode: new permissions + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "chmod({!r}, {!r})".format(path, mode)) + attr = SFTPAttributes() + attr.st_mode = mode + self._request(CMD_SETSTAT, path, attr) + + def chown(self, path, uid, gid): + """ + Change the owner (``uid``) and group (``gid``) of a file. As with + Python's `os.chown` function, you must pass both arguments, so if you + only want to change one, use `stat` first to retrieve the current + owner and group. + + :param str path: path of the file to change the owner and group of + :param int uid: new owner's uid + :param int gid: new group id + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "chown({!r}, {!r}, {!r})".format(path, uid, gid)) + attr = SFTPAttributes() + attr.st_uid, attr.st_gid = uid, gid + self._request(CMD_SETSTAT, path, attr) + + def utime(self, path, times): + """ + Set the access and modified times of the file specified by ``path``. + If ``times`` is ``None``, then the file's access and modified times + are set to the current time. Otherwise, ``times`` must be a 2-tuple + of numbers, of the form ``(atime, mtime)``, which is used to set the + access and modified times, respectively. 
This bizarre API is mimicked + from Python for the sake of consistency -- I apologize. + + :param str path: path of the file to modify + :param tuple times: + ``None`` or a tuple of (access time, modified time) in standard + internet epoch time (seconds since 01 January 1970 GMT) + """ + path = self._adjust_cwd(path) + if times is None: + times = (time.time(), time.time()) + self._log(DEBUG, "utime({!r}, {!r})".format(path, times)) + attr = SFTPAttributes() + attr.st_atime, attr.st_mtime = times + self._request(CMD_SETSTAT, path, attr) + + def truncate(self, path, size): + """ + Change the size of the file specified by ``path``. This usually + extends or shrinks the size of the file, just like the `~file.truncate` + method on Python file objects. + + :param str path: path of the file to modify + :param int size: the new size of the file + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "truncate({!r}, {!r})".format(path, size)) + attr = SFTPAttributes() + attr.st_size = size + self._request(CMD_SETSTAT, path, attr) + + def readlink(self, path): + """ + Return the target of a symbolic link (shortcut). You can use + `symlink` to create these. The result may be either an absolute or + relative pathname. + + :param str path: path of the symbolic link file + :return: target path, as a `str` + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "readlink({!r})".format(path)) + t, msg = self._request(CMD_READLINK, path) + if t != CMD_NAME: + raise SFTPError("Expected name response") + count = msg.get_int() + if count == 0: + return None + if count != 1: + raise SFTPError("Readlink returned {} results".format(count)) + return _to_unicode(msg.get_string()) + + def normalize(self, path): + """ + Return the normalized path (on the server) of a given path. This + can be used to quickly resolve symbolic links or determine what the + server is considering to be the "current folder" (by passing ``'.'`` + as ``path``). + + :param str path: path to be normalized + :return: normalized form of the given path (as a `str`) + + :raises: ``IOError`` -- if the path can't be resolved on the server + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "normalize({!r})".format(path)) + t, msg = self._request(CMD_REALPATH, path) + if t != CMD_NAME: + raise SFTPError("Expected name response") + count = msg.get_int() + if count != 1: + raise SFTPError("Realpath returned {} results".format(count)) + return msg.get_text() + + def chdir(self, path=None): + """ + Change the "current directory" of this SFTP session. Since SFTP + doesn't really have the concept of a current working directory, this is + emulated by Paramiko. Once you use this method to set a working + directory, all operations on this `.SFTPClient` object will be relative + to that path. You can pass in ``None`` to stop using a current working + directory. + + :param str path: new current working directory + + :raises: + ``IOError`` -- if the requested path doesn't exist on the server + + .. versionadded:: 1.4 + """ + if path is None: + self._cwd = None + return + if not stat.S_ISDIR(self.stat(path).st_mode): + code = errno.ENOTDIR + raise SFTPError(code, "{}: {}".format(os.strerror(code), path)) + self._cwd = b(self.normalize(path)) + + def getcwd(self): + """ + Return the "current working directory" for this SFTP session, as + emulated by Paramiko. If no directory has been set with `chdir`, + this method will return ``None``. + + .. 
versionadded:: 1.4 + """ + # TODO: make class initialize with self._cwd set to self.normalize('.') + return self._cwd and u(self._cwd) + + def _transfer_with_callback(self, reader, writer, file_size, callback): + size = 0 + while True: + data = reader.read(32768) + writer.write(data) + size += len(data) + if len(data) == 0: + break + if callback is not None: + callback(size, file_size) + return size + + def putfo(self, fl, remotepath, file_size=0, callback=None, confirm=True): + """ + Copy the contents of an open file object (``fl``) to the SFTP server as + ``remotepath``. Any exception raised by operations will be passed + through. + + The SFTP operations use pipelining for speed. + + :param fl: opened file or file-like object to copy + :param str remotepath: the destination path on the SFTP server + :param int file_size: + optional size parameter passed to callback. If none is specified, + size defaults to 0 + :param callable callback: + optional callback function (form: ``func(int, int)``) that accepts + the bytes transferred so far and the total bytes to be transferred + (since 1.7.4) + :param bool confirm: + whether to do a stat() on the file afterwards to confirm the file + size (since 1.7.7) + + :return: + an `.SFTPAttributes` object containing attributes about the given + file. + + .. versionadded:: 1.10 + """ + with self.file(remotepath, "wb") as fr: + fr.set_pipelined(True) + size = self._transfer_with_callback( + reader=fl, writer=fr, file_size=file_size, callback=callback + ) + if confirm: + s = self.stat(remotepath) + if s.st_size != size: + raise IOError( + "size mismatch in put! {} != {}".format(s.st_size, size) + ) + else: + s = SFTPAttributes() + return s + + def put(self, localpath, remotepath, callback=None, confirm=True): + """ + Copy a local file (``localpath``) to the SFTP server as ``remotepath``. + Any exception raised by operations will be passed through. This + method is primarily provided as a convenience. + + The SFTP operations use pipelining for speed. + + :param str localpath: the local file to copy + :param str remotepath: the destination path on the SFTP server. Note + that the filename should be included. Only specifying a directory + may result in an error. + :param callable callback: + optional callback function (form: ``func(int, int)``) that accepts + the bytes transferred so far and the total bytes to be transferred + :param bool confirm: + whether to do a stat() on the file afterwards to confirm the file + size + + :return: an `.SFTPAttributes` object containing attributes about the + given file + + .. versionadded:: 1.4 + .. versionchanged:: 1.7.4 + ``callback`` and rich attribute return value added. + .. versionchanged:: 1.7.7 + ``confirm`` param added. + """ + file_size = os.stat(localpath).st_size + with open(localpath, "rb") as fl: + return self.putfo(fl, remotepath, file_size, callback, confirm) + + def getfo( + self, + remotepath, + fl, + callback=None, + prefetch=True, + max_concurrent_prefetch_requests=None, + ): + """ + Copy a remote file (``remotepath``) from the SFTP server and write to + an open file or file-like object, ``fl``. Any exception raised by + operations will be passed through. This method is primarily provided + as a convenience. 
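+
+        For example (``sftp`` is assumed to be an already-connected instance
+        of this class, and the remote path is a placeholder)::
+
+            import io
+
+            buf = io.BytesIO()
+            nbytes = sftp.getfo("/tmp/remote.bin", buf)
+            data = buf.getvalue()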
+
+        :param str remotepath: the remote file to copy
+        :param fl:
+            opened file or file-like object on the local side, into which
+            the remote file's contents are written
+        :param callable callback:
+            optional callback function (form: ``func(int, int)``) that accepts
+            the bytes transferred so far and the total bytes to be transferred
+        :param bool prefetch:
+            controls whether prefetching is performed (default: True)
+        :param int max_concurrent_prefetch_requests:
+            The maximum number of concurrent read requests to prefetch. See
+            `.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param)
+            for details.
+        :return: the number of bytes written to the opened file object
+
+        .. versionadded:: 1.10
+        .. versionchanged:: 2.8
+            Added the ``prefetch`` keyword argument.
+        .. versionchanged:: 3.3
+            Added ``max_concurrent_prefetch_requests``.
+        """
+        file_size = self.stat(remotepath).st_size
+        with self.open(remotepath, "rb") as fr:
+            if prefetch:
+                fr.prefetch(file_size, max_concurrent_prefetch_requests)
+            return self._transfer_with_callback(
+                reader=fr, writer=fl, file_size=file_size, callback=callback
+            )
+
+    def get(
+        self,
+        remotepath,
+        localpath,
+        callback=None,
+        prefetch=True,
+        max_concurrent_prefetch_requests=None,
+    ):
+        """
+        Copy a remote file (``remotepath``) from the SFTP server to the local
+        host as ``localpath``. Any exception raised by operations will be
+        passed through. This method is primarily provided as a convenience.
+
+        :param str remotepath: the remote file to copy
+        :param str localpath: the destination path on the local host
+        :param callable callback:
+            optional callback function (form: ``func(int, int)``) that accepts
+            the bytes transferred so far and the total bytes to be transferred
+        :param bool prefetch:
+            controls whether prefetching is performed (default: True)
+        :param int max_concurrent_prefetch_requests:
+            The maximum number of concurrent read requests to prefetch.
+            When this is ``None`` (the default), do not limit the number of
+            concurrent prefetch requests. Note: OpenSSH's sftp internally
+            imposes a limit of 64 concurrent requests, while Paramiko imposes
+            no limit by default; consider setting a limit if a file can be
+            successfully received with sftp but hangs with Paramiko.
+
+        .. versionadded:: 1.4
+        .. versionchanged:: 1.7.4
+            Added the ``callback`` param
+        .. versionchanged:: 2.8
+            Added the ``prefetch`` keyword argument.
+        .. versionchanged:: 3.3
+            Added ``max_concurrent_prefetch_requests``.
+        """
+        with open(localpath, "wb") as fl:
+            size = self.getfo(
+                remotepath,
+                fl,
+                callback,
+                prefetch,
+                max_concurrent_prefetch_requests,
+            )
+        s = os.stat(localpath)
+        if s.st_size != size:
+            raise IOError(
+                "size mismatch in get! {} != {}".format(s.st_size, size)
+            )
+
+    # ...internals...
+
+    def _request(self, t, *args):
+        num = self._async_request(type(None), t, *args)
+        return self._read_response(num)
+
+    def _async_request(self, fileobj, t, *args):
+        # this method may be called from other threads (prefetch)
+        self._lock.acquire()
+        try:
+            msg = Message()
+            msg.add_int(self.request_number)
+            for item in args:
+                if isinstance(item, int64):
+                    msg.add_int64(item)
+                elif isinstance(item, int):
+                    msg.add_int(item)
+                elif isinstance(item, SFTPAttributes):
+                    item._pack(msg)
+                else:
+                    # For all other types, rely on as_string() to either coerce
+                    # to bytes before writing or raise a suitable exception.
+ msg.add_string(item) + num = self.request_number + self._expecting[num] = fileobj + self.request_number += 1 + finally: + self._lock.release() + self._send_packet(t, msg) + return num + + def _read_response(self, waitfor=None): + while True: + try: + t, data = self._read_packet() + except EOFError as e: + raise SSHException("Server connection dropped: {}".format(e)) + msg = Message(data) + num = msg.get_int() + self._lock.acquire() + try: + if num not in self._expecting: + # might be response for a file that was closed before + # responses came back + self._log(DEBUG, "Unexpected response #{}".format(num)) + if waitfor is None: + # just doing a single check + break + continue + fileobj = self._expecting[num] + del self._expecting[num] + finally: + self._lock.release() + if num == waitfor: + # synchronous + if t == CMD_STATUS: + self._convert_status(msg) + return t, msg + + # can not rewrite this to deal with E721, either as a None check + # nor as not an instance of None or NoneType + if fileobj is not type(None): # noqa + fileobj._async_response(t, msg, num) + if waitfor is None: + # just doing a single check + break + return None, None + + def _finish_responses(self, fileobj): + while fileobj in self._expecting.values(): + self._read_response() + fileobj._check_exception() + + def _convert_status(self, msg): + """ + Raises EOFError or IOError on error status; otherwise does nothing. + """ + code = msg.get_int() + text = msg.get_text() + if code == SFTP_OK: + return + elif code == SFTP_EOF: + raise EOFError(text) + elif code == SFTP_NO_SUCH_FILE: + # clever idea from john a. meinel: map the error codes to errno + raise IOError(errno.ENOENT, text) + elif code == SFTP_PERMISSION_DENIED: + raise IOError(errno.EACCES, text) + else: + raise IOError(text) + + def _adjust_cwd(self, path): + """ + Return an adjusted path if we're emulating a "current working + directory" for the server. + """ + path = b(path) + if self._cwd is None: + return path + if len(path) and path[0:1] == b_slash: + # absolute path + return path + if self._cwd == b_slash: + return self._cwd + path + return self._cwd + b_slash + path + + +class SFTP(SFTPClient): + """ + An alias for `.SFTPClient` for backwards compatibility. + """ + + pass diff --git a/.venv/lib/python3.9/site-packages/paramiko/sftp_file.py b/.venv/lib/python3.9/site-packages/paramiko/sftp_file.py new file mode 100644 index 0000000..c74695e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/sftp_file.py @@ -0,0 +1,594 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +""" +SFTP file object +""" + + +from binascii import hexlify +from collections import deque +import socket +import threading +import time +from paramiko.common import DEBUG, io_sleep + +from paramiko.file import BufferedFile +from paramiko.util import u +from paramiko.sftp import ( + CMD_CLOSE, + CMD_READ, + CMD_DATA, + SFTPError, + CMD_WRITE, + CMD_STATUS, + CMD_FSTAT, + CMD_ATTRS, + CMD_FSETSTAT, + CMD_EXTENDED, + int64, +) +from paramiko.sftp_attr import SFTPAttributes + + +class SFTPFile(BufferedFile): + """ + Proxy object for a file on the remote server, in client mode SFTP. + + Instances of this class may be used as context managers in the same way + that built-in Python file objects are. + """ + + # Some sftp servers will choke if you send read/write requests larger than + # this size. + MAX_REQUEST_SIZE = 32768 + + def __init__(self, sftp, handle, mode="r", bufsize=-1): + BufferedFile.__init__(self) + self.sftp = sftp + self.handle = handle + BufferedFile._set_mode(self, mode, bufsize) + self.pipelined = False + self._prefetching = False + self._prefetch_done = False + self._prefetch_data = {} + self._prefetch_extents = {} + self._prefetch_lock = threading.Lock() + self._saved_exception = None + self._reqs = deque() + + def __del__(self): + self._close(async_=True) + + def close(self): + """ + Close the file. + """ + self._close(async_=False) + + def _close(self, async_=False): + # We allow double-close without signaling an error, because real + # Python file objects do. However, we must protect against actually + # sending multiple CMD_CLOSE packets, because after we close our + # handle, the same handle may be re-allocated by the server, and we + # may end up mysteriously closing some random other file. (This is + # especially important because we unconditionally call close() from + # __del__.) + if self._closed: + return + self.sftp._log(DEBUG, "close({})".format(u(hexlify(self.handle)))) + if self.pipelined: + self.sftp._finish_responses(self) + BufferedFile.close(self) + try: + if async_: + # GC'd file handle could be called from an arbitrary thread + # -- don't wait for a response + self.sftp._async_request(type(None), CMD_CLOSE, self.handle) + else: + self.sftp._request(CMD_CLOSE, self.handle) + except EOFError: + # may have outlived the Transport connection + pass + except (IOError, socket.error): + # may have outlived the Transport connection + pass + + def _data_in_prefetch_requests(self, offset, size): + k = [ + x for x in list(self._prefetch_extents.values()) if x[0] <= offset + ] + if len(k) == 0: + return False + k.sort(key=lambda x: x[0]) + buf_offset, buf_size = k[-1] + if buf_offset + buf_size <= offset: + # prefetch request ends before this one begins + return False + if buf_offset + buf_size >= offset + size: + # inclusive + return True + # well, we have part of the request. see if another chunk has + # the rest. + return self._data_in_prefetch_requests( + buf_offset + buf_size, offset + size - buf_offset - buf_size + ) + + def _data_in_prefetch_buffers(self, offset): + """ + if a block of data is present in the prefetch buffers, at the given + offset, return the offset of the relevant prefetch buffer. otherwise, + return None. this guarantees nothing about the number of bytes + collected in the prefetch buffer so far. 
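+
+        illustrative example (offsets and sizes invented for the sketch):
+        with two 32768-byte buffers at offsets 0 and 32768 in
+        ``self._prefetch_data``, an ``offset`` of 40000 falls inside the
+        second buffer, so 32768 is returned; an ``offset`` of 70000 lies
+        past both buffers, so None is returned.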
+ """ + k = [i for i in self._prefetch_data.keys() if i <= offset] + if len(k) == 0: + return None + index = max(k) + buf_offset = offset - index + if buf_offset >= len(self._prefetch_data[index]): + # it's not here + return None + return index + + def _read_prefetch(self, size): + """ + read data out of the prefetch buffer, if possible. if the data isn't + in the buffer, return None. otherwise, behaves like a normal read. + """ + # while not closed, and haven't fetched past the current position, + # and haven't reached EOF... + while True: + offset = self._data_in_prefetch_buffers(self._realpos) + if offset is not None: + break + if self._prefetch_done or self._closed: + break + self.sftp._read_response() + self._check_exception() + if offset is None: + self._prefetching = False + return None + prefetch = self._prefetch_data[offset] + del self._prefetch_data[offset] + + buf_offset = self._realpos - offset + if buf_offset > 0: + self._prefetch_data[offset] = prefetch[:buf_offset] + prefetch = prefetch[buf_offset:] + if size < len(prefetch): + self._prefetch_data[self._realpos + size] = prefetch[size:] + prefetch = prefetch[:size] + return prefetch + + def _read(self, size): + size = min(size, self.MAX_REQUEST_SIZE) + if self._prefetching: + data = self._read_prefetch(size) + if data is not None: + return data + t, msg = self.sftp._request( + CMD_READ, self.handle, int64(self._realpos), int(size) + ) + if t != CMD_DATA: + raise SFTPError("Expected data") + return msg.get_string() + + def _write(self, data): + # may write less than requested if it would exceed max packet size + chunk = min(len(data), self.MAX_REQUEST_SIZE) + sftp_async_request = self.sftp._async_request( + type(None), + CMD_WRITE, + self.handle, + int64(self._realpos), + data[:chunk], + ) + self._reqs.append(sftp_async_request) + if not self.pipelined or ( + len(self._reqs) > 100 and self.sftp.sock.recv_ready() + ): + while len(self._reqs): + req = self._reqs.popleft() + t, msg = self.sftp._read_response(req) + if t != CMD_STATUS: + raise SFTPError("Expected status") + # convert_status already called + return chunk + + def settimeout(self, timeout): + """ + Set a timeout on read/write operations on the underlying socket or + ssh `.Channel`. + + :param float timeout: + seconds to wait for a pending read/write operation before raising + ``socket.timeout``, or ``None`` for no timeout + + .. seealso:: `.Channel.settimeout` + """ + self.sftp.sock.settimeout(timeout) + + def gettimeout(self): + """ + Returns the timeout in seconds (as a `float`) associated with the + socket or ssh `.Channel` used for this file. + + .. seealso:: `.Channel.gettimeout` + """ + return self.sftp.sock.gettimeout() + + def setblocking(self, blocking): + """ + Set blocking or non-blocking mode on the underiying socket or ssh + `.Channel`. + + :param int blocking: + 0 to set non-blocking mode; non-0 to set blocking mode. + + .. seealso:: `.Channel.setblocking` + """ + self.sftp.sock.setblocking(blocking) + + def seekable(self): + """ + Check if the file supports random access. + + :return: + `True` if the file supports random access. If `False`, + :meth:`seek` will raise an exception + """ + return True + + def seek(self, offset, whence=0): + """ + Set the file's current position. + + See `file.seek` for details. 
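+
+        For example (``f`` is assumed to be an open `.SFTPFile` over a
+        remote file at least 64 bytes long)::
+
+            f.seek(0)                # rewind to the start of the file
+            header = f.read(16)      # read the first 16 bytes
+            f.seek(-64, f.SEEK_END)  # jump to 64 bytes before the end
+            tail = f.read(64)        # read the final 64 bytes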
+ """ + self.flush() + if whence == self.SEEK_SET: + self._realpos = self._pos = offset + elif whence == self.SEEK_CUR: + self._pos += offset + self._realpos = self._pos + else: + self._realpos = self._pos = self._get_size() + offset + self._rbuffer = bytes() + + def stat(self): + """ + Retrieve information about this file from the remote system. This is + exactly like `.SFTPClient.stat`, except that it operates on an + already-open file. + + :returns: + an `.SFTPAttributes` object containing attributes about this file. + """ + t, msg = self.sftp._request(CMD_FSTAT, self.handle) + if t != CMD_ATTRS: + raise SFTPError("Expected attributes") + return SFTPAttributes._from_msg(msg) + + def chmod(self, mode): + """ + Change the mode (permissions) of this file. The permissions are + unix-style and identical to those used by Python's `os.chmod` + function. + + :param int mode: new permissions + """ + self.sftp._log( + DEBUG, "chmod({}, {!r})".format(hexlify(self.handle), mode) + ) + attr = SFTPAttributes() + attr.st_mode = mode + self.sftp._request(CMD_FSETSTAT, self.handle, attr) + + def chown(self, uid, gid): + """ + Change the owner (``uid``) and group (``gid``) of this file. As with + Python's `os.chown` function, you must pass both arguments, so if you + only want to change one, use `stat` first to retrieve the current + owner and group. + + :param int uid: new owner's uid + :param int gid: new group id + """ + self.sftp._log( + DEBUG, + "chown({}, {!r}, {!r})".format(hexlify(self.handle), uid, gid), + ) + attr = SFTPAttributes() + attr.st_uid, attr.st_gid = uid, gid + self.sftp._request(CMD_FSETSTAT, self.handle, attr) + + def utime(self, times): + """ + Set the access and modified times of this file. If + ``times`` is ``None``, then the file's access and modified times are + set to the current time. Otherwise, ``times`` must be a 2-tuple of + numbers, of the form ``(atime, mtime)``, which is used to set the + access and modified times, respectively. This bizarre API is mimicked + from Python for the sake of consistency -- I apologize. + + :param tuple times: + ``None`` or a tuple of (access time, modified time) in standard + internet epoch time (seconds since 01 January 1970 GMT) + """ + if times is None: + times = (time.time(), time.time()) + self.sftp._log( + DEBUG, "utime({}, {!r})".format(hexlify(self.handle), times) + ) + attr = SFTPAttributes() + attr.st_atime, attr.st_mtime = times + self.sftp._request(CMD_FSETSTAT, self.handle, attr) + + def truncate(self, size): + """ + Change the size of this file. This usually extends + or shrinks the size of the file, just like the ``truncate()`` method on + Python file objects. + + :param size: the new size of the file + """ + self.sftp._log( + DEBUG, "truncate({}, {!r})".format(hexlify(self.handle), size) + ) + attr = SFTPAttributes() + attr.st_size = size + self.sftp._request(CMD_FSETSTAT, self.handle, attr) + + def check(self, hash_algorithm, offset=0, length=0, block_size=0): + """ + Ask the server for a hash of a section of this file. This can be used + to verify a successful upload or download, or for various rsync-like + operations. + + The file is hashed from ``offset``, for ``length`` bytes. + If ``length`` is 0, the remainder of the file is hashed. Thus, if both + ``offset`` and ``length`` are zero, the entire file is hashed. + + Normally, ``block_size`` will be 0 (the default), and this method will + return a byte string representing the requested hash (for example, a + string of length 16 for MD5, or 20 for SHA-1). 
If a non-zero + ``block_size`` is given, each chunk of the file (from ``offset`` to + ``offset + length``) of ``block_size`` bytes is computed as a separate + hash. The hash results are all concatenated and returned as a single + string. + + For example, ``check('sha1', 0, 1024, 512)`` will return a string of + length 40. The first 20 bytes will be the SHA-1 of the first 512 bytes + of the file, and the last 20 bytes will be the SHA-1 of the next 512 + bytes. + + :param str hash_algorithm: + the name of the hash algorithm to use (normally ``"sha1"`` or + ``"md5"``) + :param offset: + offset into the file to begin hashing (0 means to start from the + beginning) + :param length: + number of bytes to hash (0 means continue to the end of the file) + :param int block_size: + number of bytes to hash per result (must not be less than 256; 0 + means to compute only one hash of the entire segment) + :return: + `str` of bytes representing the hash of each block, concatenated + together + + :raises: + ``IOError`` -- if the server doesn't support the "check-file" + extension, or possibly doesn't support the hash algorithm requested + + .. note:: Many (most?) servers don't support this extension yet. + + .. versionadded:: 1.4 + """ + t, msg = self.sftp._request( + CMD_EXTENDED, + "check-file", + self.handle, + hash_algorithm, + int64(offset), + int64(length), + block_size, + ) + msg.get_text() # ext + msg.get_text() # alg + data = msg.get_remainder() + return data + + def set_pipelined(self, pipelined=True): + """ + Turn on/off the pipelining of write operations to this file. When + pipelining is on, paramiko won't wait for the server response after + each write operation. Instead, they're collected as they come in. At + the first non-write operation (including `.close`), all remaining + server responses are collected. This means that if there was an error + with one of your later writes, an exception might be thrown from within + `.close` instead of `.write`. + + By default, files are not pipelined. + + :param bool pipelined: + ``True`` if pipelining should be turned on for this file; ``False`` + otherwise + + .. versionadded:: 1.5 + """ + self.pipelined = pipelined + + def prefetch(self, file_size=None, max_concurrent_requests=None): + """ + Pre-fetch the remaining contents of this file in anticipation of future + `.read` calls. If reading the entire file, pre-fetching can + dramatically improve the download speed by avoiding roundtrip latency. + The file's contents are incrementally buffered in a background thread. + + The prefetched data is stored in a buffer until read via the `.read` + method. Once data has been read, it's removed from the buffer. The + data may be read in a random order (using `.seek`); chunks of the + buffer that haven't been read will continue to be buffered. + + :param int file_size: + When this is ``None`` (the default), this method calls `stat` to + determine the remote file size. In some situations, doing so can + cause exceptions or hangs (see `#562 + `_); as a + workaround, one may call `stat` explicitly and pass its value in + via this parameter. + :param int max_concurrent_requests: + The maximum number of concurrent read requests to prefetch. See + `.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param) + for details. + + .. versionadded:: 1.5.1 + .. versionchanged:: 1.16.0 + The ``file_size`` parameter was added (with no default value). + .. versionchanged:: 1.16.1 + The ``file_size`` parameter was made optional for backwards + compatibility. + .. 
versionchanged:: 3.3 + Added ``max_concurrent_requests``. + """ + if file_size is None: + file_size = self.stat().st_size + + # queue up async reads for the rest of the file + chunks = [] + n = self._realpos + while n < file_size: + chunk = min(self.MAX_REQUEST_SIZE, file_size - n) + chunks.append((n, chunk)) + n += chunk + if len(chunks) > 0: + self._start_prefetch(chunks, max_concurrent_requests) + + def readv(self, chunks, max_concurrent_prefetch_requests=None): + """ + Read a set of blocks from the file by (offset, length). This is more + efficient than doing a series of `.seek` and `.read` calls, since the + prefetch machinery is used to retrieve all the requested blocks at + once. + + :param chunks: + a list of ``(offset, length)`` tuples indicating which sections of + the file to read + :param int max_concurrent_prefetch_requests: + The maximum number of concurrent read requests to prefetch. See + `.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param) + for details. + :return: a list of blocks read, in the same order as in ``chunks`` + + .. versionadded:: 1.5.4 + .. versionchanged:: 3.3 + Added ``max_concurrent_prefetch_requests``. + """ + self.sftp._log( + DEBUG, "readv({}, {!r})".format(hexlify(self.handle), chunks) + ) + + read_chunks = [] + for offset, size in chunks: + # don't fetch data that's already in the prefetch buffer + if self._data_in_prefetch_buffers( + offset + ) or self._data_in_prefetch_requests(offset, size): + continue + + # break up anything larger than the max read size + while size > 0: + chunk_size = min(size, self.MAX_REQUEST_SIZE) + read_chunks.append((offset, chunk_size)) + offset += chunk_size + size -= chunk_size + + self._start_prefetch(read_chunks, max_concurrent_prefetch_requests) + # now we can just devolve to a bunch of read()s :) + for x in chunks: + self.seek(x[0]) + yield self.read(x[1]) + + # ...internals... + + def _get_size(self): + try: + return self.stat().st_size + except: + return 0 + + def _start_prefetch(self, chunks, max_concurrent_requests=None): + self._prefetching = True + self._prefetch_done = False + + t = threading.Thread( + target=self._prefetch_thread, + args=(chunks, max_concurrent_requests), + ) + t.daemon = True + t.start() + + def _prefetch_thread(self, chunks, max_concurrent_requests): + # do these read requests in a temporary thread because there may be + # a lot of them, so it may block. 
+ for offset, length in chunks: + # Limit the number of concurrent requests in a busy-loop + if max_concurrent_requests is not None: + while True: + with self._prefetch_lock: + pf_len = len(self._prefetch_extents) + if pf_len < max_concurrent_requests: + break + time.sleep(io_sleep) + + num = self.sftp._async_request( + self, CMD_READ, self.handle, int64(offset), int(length) + ) + with self._prefetch_lock: + self._prefetch_extents[num] = (offset, length) + + def _async_response(self, t, msg, num): + if t == CMD_STATUS: + # save exception and re-raise it on next file operation + try: + self.sftp._convert_status(msg) + except Exception as e: + self._saved_exception = e + return + if t != CMD_DATA: + raise SFTPError("Expected data") + data = msg.get_string() + while True: + with self._prefetch_lock: + # spin if in race with _prefetch_thread + if num in self._prefetch_extents: + offset, length = self._prefetch_extents[num] + self._prefetch_data[offset] = data + del self._prefetch_extents[num] + if len(self._prefetch_extents) == 0: + self._prefetch_done = True + break + + def _check_exception(self): + """if there's a saved exception, raise & clear it""" + if self._saved_exception is not None: + x = self._saved_exception + self._saved_exception = None + raise x diff --git a/.venv/lib/python3.9/site-packages/paramiko/sftp_handle.py b/.venv/lib/python3.9/site-packages/paramiko/sftp_handle.py new file mode 100644 index 0000000..b204652 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/sftp_handle.py @@ -0,0 +1,196 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Abstraction of an SFTP file handle (for server mode). +""" + +import os +from paramiko.sftp import SFTP_OP_UNSUPPORTED, SFTP_OK +from paramiko.util import ClosingContextManager + + +class SFTPHandle(ClosingContextManager): + """ + Abstract object representing a handle to an open file (or folder) in an + SFTP server implementation. Each handle has a string representation used + by the client to refer to the underlying file. + + Server implementations can (and should) subclass SFTPHandle to implement + features of a file handle, like `stat` or `chattr`. + + Instances of this class may be used as context managers. + """ + + def __init__(self, flags=0): + """ + Create a new file handle representing a local file being served over + SFTP. If ``flags`` is passed in, it's used to determine if the file + is open in append mode. + + :param int flags: optional flags as passed to + `.SFTPServerInterface.open` + """ + self.__flags = flags + self.__name = None + # only for handles to folders: + self.__files = {} + self.__tell = None + + def close(self): + """ + When a client closes a file, this method is called on the handle. 
+ Normally you would use this method to close the underlying OS level + file object(s). + + The default implementation checks for attributes on ``self`` named + ``readfile`` and/or ``writefile``, and if either or both are present, + their ``close()`` methods are called. This means that if you are + using the default implementations of `read` and `write`, this + method's default implementation should be fine also. + """ + readfile = getattr(self, "readfile", None) + if readfile is not None: + readfile.close() + writefile = getattr(self, "writefile", None) + if writefile is not None: + writefile.close() + + def read(self, offset, length): + """ + Read up to ``length`` bytes from this file, starting at position + ``offset``. The offset may be a Python long, since SFTP allows it + to be 64 bits. + + If the end of the file has been reached, this method may return an + empty string to signify EOF, or it may also return ``SFTP_EOF``. + + The default implementation checks for an attribute on ``self`` named + ``readfile``, and if present, performs the read operation on the Python + file-like object found there. (This is meant as a time saver for the + common case where you are wrapping a Python file object.) + + :param offset: position in the file to start reading from. + :param int length: number of bytes to attempt to read. + :return: the `bytes` read, or an error code `int`. + """ + readfile = getattr(self, "readfile", None) + if readfile is None: + return SFTP_OP_UNSUPPORTED + try: + if self.__tell is None: + self.__tell = readfile.tell() + if offset != self.__tell: + readfile.seek(offset) + self.__tell = offset + data = readfile.read(length) + except IOError as e: + self.__tell = None + return SFTPServer.convert_errno(e.errno) + self.__tell += len(data) + return data + + def write(self, offset, data): + """ + Write ``data`` into this file at position ``offset``. Extending the + file past its original end is expected. Unlike Python's normal + ``write()`` methods, this method cannot do a partial write: it must + write all of ``data`` or else return an error. + + The default implementation checks for an attribute on ``self`` named + ``writefile``, and if present, performs the write operation on the + Python file-like object found there. The attribute is named + differently from ``readfile`` to make it easy to implement read-only + (or write-only) files, but if both attributes are present, they should + refer to the same file. + + :param offset: position in the file to start reading from. + :param bytes data: data to write into the file. + :return: an SFTP error code like ``SFTP_OK``. + """ + writefile = getattr(self, "writefile", None) + if writefile is None: + return SFTP_OP_UNSUPPORTED + try: + # in append mode, don't care about seeking + if (self.__flags & os.O_APPEND) == 0: + if self.__tell is None: + self.__tell = writefile.tell() + if offset != self.__tell: + writefile.seek(offset) + self.__tell = offset + writefile.write(data) + writefile.flush() + except IOError as e: + self.__tell = None + return SFTPServer.convert_errno(e.errno) + if self.__tell is not None: + self.__tell += len(data) + return SFTP_OK + + def stat(self): + """ + Return an `.SFTPAttributes` object referring to this open file, or an + error code. This is equivalent to `.SFTPServerInterface.stat`, except + it's called on an open file instead of a path. + + :return: + an attributes object for the given file, or an SFTP error code + (like ``SFTP_PERMISSION_DENIED``). 
+ :rtype: `.SFTPAttributes` or error code + """ + return SFTP_OP_UNSUPPORTED + + def chattr(self, attr): + """ + Change the attributes of this file. The ``attr`` object will contain + only those fields provided by the client in its request, so you should + check for the presence of fields before using them. + + :param .SFTPAttributes attr: the attributes to change on this file. + :return: an `int` error code like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + # ...internals... + + def _set_files(self, files): + """ + Used by the SFTP server code to cache a directory listing. (In + the SFTP protocol, listing a directory is a multi-stage process + requiring a temporary handle.) + """ + self.__files = files + + def _get_next_files(self): + """ + Used by the SFTP server code to retrieve a cached directory + listing. + """ + fnlist = self.__files[:16] + self.__files = self.__files[16:] + return fnlist + + def _get_name(self): + return self.__name + + def _set_name(self, name): + self.__name = name + + +from paramiko.sftp_server import SFTPServer diff --git a/.venv/lib/python3.9/site-packages/paramiko/sftp_server.py b/.venv/lib/python3.9/site-packages/paramiko/sftp_server.py new file mode 100644 index 0000000..cd3910d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/sftp_server.py @@ -0,0 +1,537 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Server-mode SFTP support. +""" + +import os +import errno +import sys +from hashlib import md5, sha1 + +from paramiko import util +from paramiko.sftp import ( + BaseSFTP, + Message, + SFTP_FAILURE, + SFTP_PERMISSION_DENIED, + SFTP_NO_SUCH_FILE, + int64, +) +from paramiko.sftp_si import SFTPServerInterface +from paramiko.sftp_attr import SFTPAttributes +from paramiko.common import DEBUG +from paramiko.server import SubsystemHandler +from paramiko.util import b + + +# known hash algorithms for the "check-file" extension +from paramiko.sftp import ( + CMD_HANDLE, + SFTP_DESC, + CMD_STATUS, + SFTP_EOF, + CMD_NAME, + SFTP_BAD_MESSAGE, + CMD_EXTENDED_REPLY, + SFTP_FLAG_READ, + SFTP_FLAG_WRITE, + SFTP_FLAG_APPEND, + SFTP_FLAG_CREATE, + SFTP_FLAG_TRUNC, + SFTP_FLAG_EXCL, + CMD_NAMES, + CMD_OPEN, + CMD_CLOSE, + SFTP_OK, + CMD_READ, + CMD_DATA, + CMD_WRITE, + CMD_REMOVE, + CMD_RENAME, + CMD_MKDIR, + CMD_RMDIR, + CMD_OPENDIR, + CMD_READDIR, + CMD_STAT, + CMD_ATTRS, + CMD_LSTAT, + CMD_FSTAT, + CMD_SETSTAT, + CMD_FSETSTAT, + CMD_READLINK, + CMD_SYMLINK, + CMD_REALPATH, + CMD_EXTENDED, + SFTP_OP_UNSUPPORTED, +) + +_hash_class = {"sha1": sha1, "md5": md5} + + +class SFTPServer(BaseSFTP, SubsystemHandler): + """ + Server-side SFTP subsystem support. Since this is a `.SubsystemHandler`, + it can be (and is meant to be) set as the handler for ``"sftp"`` requests. 
+ Use `.Transport.set_subsystem_handler` to activate this class. + """ + + def __init__( + self, + channel, + name, + server, + sftp_si=SFTPServerInterface, + *args, + **kwargs + ): + """ + The constructor for SFTPServer is meant to be called from within the + `.Transport` as a subsystem handler. ``server`` and any additional + parameters or keyword parameters are passed from the original call to + `.Transport.set_subsystem_handler`. + + :param .Channel channel: channel passed from the `.Transport`. + :param str name: name of the requested subsystem. + :param .ServerInterface server: + the server object associated with this channel and subsystem + :param sftp_si: + a subclass of `.SFTPServerInterface` to use for handling individual + requests. + """ + BaseSFTP.__init__(self) + SubsystemHandler.__init__(self, channel, name, server) + transport = channel.get_transport() + self.logger = util.get_logger(transport.get_log_channel() + ".sftp") + self.ultra_debug = transport.get_hexdump() + self.next_handle = 1 + # map of handle-string to SFTPHandle for files & folders: + self.file_table = {} + self.folder_table = {} + self.server = sftp_si(server, *args, **kwargs) + + def _log(self, level, msg): + if issubclass(type(msg), list): + for m in msg: + super()._log(level, "[chan " + self.sock.get_name() + "] " + m) + else: + super()._log(level, "[chan " + self.sock.get_name() + "] " + msg) + + def start_subsystem(self, name, transport, channel): + self.sock = channel + self._log(DEBUG, "Started sftp server on channel {!r}".format(channel)) + self._send_server_version() + self.server.session_started() + while True: + try: + t, data = self._read_packet() + except EOFError: + self._log(DEBUG, "EOF -- end of session") + return + except Exception as e: + self._log(DEBUG, "Exception on channel: " + str(e)) + self._log(DEBUG, util.tb_strings()) + return + msg = Message(data) + request_number = msg.get_int() + try: + self._process(t, request_number, msg) + except Exception as e: + self._log(DEBUG, "Exception in server processing: " + str(e)) + self._log(DEBUG, util.tb_strings()) + # send some kind of failure message, at least + try: + self._send_status(request_number, SFTP_FAILURE) + except: + pass + + def finish_subsystem(self): + self.server.session_ended() + super().finish_subsystem() + # close any file handles that were left open + # (so we can return them to the OS quickly) + for f in self.file_table.values(): + f.close() + for f in self.folder_table.values(): + f.close() + self.file_table = {} + self.folder_table = {} + + @staticmethod + def convert_errno(e): + """ + Convert an errno value (as from an ``OSError`` or ``IOError``) into a + standard SFTP result code. This is a convenience function for trapping + exceptions in server code and returning an appropriate result. + + :param int e: an errno code, as from ``OSError.errno``. + :return: an `int` SFTP error code like ``SFTP_NO_SUCH_FILE``. + """ + if e == errno.EACCES: + # permission denied + return SFTP_PERMISSION_DENIED + elif (e == errno.ENOENT) or (e == errno.ENOTDIR): + # no such file + return SFTP_NO_SUCH_FILE + else: + return SFTP_FAILURE + + @staticmethod + def set_file_attr(filename, attr): + """ + Change a file's attributes on the local filesystem. The contents of + ``attr`` are used to change the permissions, owner, group ownership, + and/or modification & access time of the file, depending on which + attributes are present in ``attr``. 
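+
+        For example, a server subclass might route ``chattr`` requests
+        through this helper (the subclass name and local root below are
+        illustrative only)::
+
+            class MySFTPServerInterface(SFTPServerInterface):
+                def chattr(self, path, attr):
+                    try:
+                        SFTPServer.set_file_attr("/srv/files" + path, attr)
+                        return SFTP_OK
+                    except OSError as e:
+                        return SFTPServer.convert_errno(e.errno)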
+ + This is meant to be a handy helper function for translating SFTP file + requests into local file operations. + + :param str filename: + name of the file to alter (should usually be an absolute path). + :param .SFTPAttributes attr: attributes to change. + """ + if sys.platform != "win32": + # mode operations are meaningless on win32 + if attr._flags & attr.FLAG_PERMISSIONS: + os.chmod(filename, attr.st_mode) + if attr._flags & attr.FLAG_UIDGID: + os.chown(filename, attr.st_uid, attr.st_gid) + if attr._flags & attr.FLAG_AMTIME: + os.utime(filename, (attr.st_atime, attr.st_mtime)) + if attr._flags & attr.FLAG_SIZE: + with open(filename, "w+") as f: + f.truncate(attr.st_size) + + # ...internals... + + def _response(self, request_number, t, *args): + msg = Message() + msg.add_int(request_number) + for item in args: + # NOTE: this is a very silly tiny class used for SFTPFile mostly + if isinstance(item, int64): + msg.add_int64(item) + elif isinstance(item, int): + msg.add_int(item) + elif isinstance(item, (str, bytes)): + msg.add_string(item) + elif type(item) is SFTPAttributes: + item._pack(msg) + else: + raise Exception( + "unknown type for {!r} type {!r}".format(item, type(item)) + ) + self._send_packet(t, msg) + + def _send_handle_response(self, request_number, handle, folder=False): + if not issubclass(type(handle), SFTPHandle): + # must be error code + self._send_status(request_number, handle) + return + handle._set_name(b("hx{:d}".format(self.next_handle))) + self.next_handle += 1 + if folder: + self.folder_table[handle._get_name()] = handle + else: + self.file_table[handle._get_name()] = handle + self._response(request_number, CMD_HANDLE, handle._get_name()) + + def _send_status(self, request_number, code, desc=None): + if desc is None: + try: + desc = SFTP_DESC[code] + except IndexError: + desc = "Unknown" + # some clients expect a "language" tag at the end + # (but don't mind it being blank) + self._response(request_number, CMD_STATUS, code, desc, "") + + def _open_folder(self, request_number, path): + resp = self.server.list_folder(path) + if issubclass(type(resp), list): + # got an actual list of filenames in the folder + folder = SFTPHandle() + folder._set_files(resp) + self._send_handle_response(request_number, folder, True) + return + # must be an error code + self._send_status(request_number, resp) + + def _read_folder(self, request_number, folder): + flist = folder._get_next_files() + if len(flist) == 0: + self._send_status(request_number, SFTP_EOF) + return + msg = Message() + msg.add_int(request_number) + msg.add_int(len(flist)) + for attr in flist: + msg.add_string(attr.filename) + msg.add_string(attr) + attr._pack(msg) + self._send_packet(CMD_NAME, msg) + + def _check_file(self, request_number, msg): + # this extension actually comes from v6 protocol, but since it's an + # extension, i feel like we can reasonably support it backported. + # it's very useful for verifying uploaded files or checking for + # rsync-like differences between local and remote files. 
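+        # The request carries: a file handle, a list of acceptable hash
+        # algorithm names, a starting offset, a byte count (0 means "to the
+        # end of the file"), and a per-block size (0 means "hash the whole
+        # range as a single block").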
+ handle = msg.get_binary() + alg_list = msg.get_list() + start = msg.get_int64() + length = msg.get_int64() + block_size = msg.get_int() + if handle not in self.file_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + f = self.file_table[handle] + for x in alg_list: + if x in _hash_class: + algname = x + alg = _hash_class[x] + break + else: + self._send_status( + request_number, SFTP_FAILURE, "No supported hash types found" + ) + return + if length == 0: + st = f.stat() + if not issubclass(type(st), SFTPAttributes): + self._send_status(request_number, st, "Unable to stat file") + return + length = st.st_size - start + if block_size == 0: + block_size = length + if block_size < 256: + self._send_status( + request_number, SFTP_FAILURE, "Block size too small" + ) + return + + sum_out = bytes() + offset = start + while offset < start + length: + blocklen = min(block_size, start + length - offset) + # don't try to read more than about 64KB at a time + chunklen = min(blocklen, 65536) + count = 0 + hash_obj = alg() + while count < blocklen: + data = f.read(offset, chunklen) + if not isinstance(data, bytes): + self._send_status( + request_number, data, "Unable to hash file" + ) + return + hash_obj.update(data) + count += len(data) + offset += count + sum_out += hash_obj.digest() + + msg = Message() + msg.add_int(request_number) + msg.add_string("check-file") + msg.add_string(algname) + msg.add_bytes(sum_out) + self._send_packet(CMD_EXTENDED_REPLY, msg) + + def _convert_pflags(self, pflags): + """convert SFTP-style open() flags to Python's os.open() flags""" + if (pflags & SFTP_FLAG_READ) and (pflags & SFTP_FLAG_WRITE): + flags = os.O_RDWR + elif pflags & SFTP_FLAG_WRITE: + flags = os.O_WRONLY + else: + flags = os.O_RDONLY + if pflags & SFTP_FLAG_APPEND: + flags |= os.O_APPEND + if pflags & SFTP_FLAG_CREATE: + flags |= os.O_CREAT + if pflags & SFTP_FLAG_TRUNC: + flags |= os.O_TRUNC + if pflags & SFTP_FLAG_EXCL: + flags |= os.O_EXCL + return flags + + def _process(self, t, request_number, msg): + self._log(DEBUG, "Request: {}".format(CMD_NAMES[t])) + if t == CMD_OPEN: + path = msg.get_text() + flags = self._convert_pflags(msg.get_int()) + attr = SFTPAttributes._from_msg(msg) + self._send_handle_response( + request_number, self.server.open(path, flags, attr) + ) + elif t == CMD_CLOSE: + handle = msg.get_binary() + if handle in self.folder_table: + del self.folder_table[handle] + self._send_status(request_number, SFTP_OK) + return + if handle in self.file_table: + self.file_table[handle].close() + del self.file_table[handle] + self._send_status(request_number, SFTP_OK) + return + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + elif t == CMD_READ: + handle = msg.get_binary() + offset = msg.get_int64() + length = msg.get_int() + if handle not in self.file_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + data = self.file_table[handle].read(offset, length) + if isinstance(data, (bytes, str)): + if len(data) == 0: + self._send_status(request_number, SFTP_EOF) + else: + self._response(request_number, CMD_DATA, data) + else: + self._send_status(request_number, data) + elif t == CMD_WRITE: + handle = msg.get_binary() + offset = msg.get_int64() + data = msg.get_binary() + if handle not in self.file_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + self._send_status( + request_number, self.file_table[handle].write(offset, data) + ) + elif t == 
CMD_REMOVE: + path = msg.get_text() + self._send_status(request_number, self.server.remove(path)) + elif t == CMD_RENAME: + oldpath = msg.get_text() + newpath = msg.get_text() + self._send_status( + request_number, self.server.rename(oldpath, newpath) + ) + elif t == CMD_MKDIR: + path = msg.get_text() + attr = SFTPAttributes._from_msg(msg) + self._send_status(request_number, self.server.mkdir(path, attr)) + elif t == CMD_RMDIR: + path = msg.get_text() + self._send_status(request_number, self.server.rmdir(path)) + elif t == CMD_OPENDIR: + path = msg.get_text() + self._open_folder(request_number, path) + return + elif t == CMD_READDIR: + handle = msg.get_binary() + if handle not in self.folder_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + folder = self.folder_table[handle] + self._read_folder(request_number, folder) + elif t == CMD_STAT: + path = msg.get_text() + resp = self.server.stat(path) + if issubclass(type(resp), SFTPAttributes): + self._response(request_number, CMD_ATTRS, resp) + else: + self._send_status(request_number, resp) + elif t == CMD_LSTAT: + path = msg.get_text() + resp = self.server.lstat(path) + if issubclass(type(resp), SFTPAttributes): + self._response(request_number, CMD_ATTRS, resp) + else: + self._send_status(request_number, resp) + elif t == CMD_FSTAT: + handle = msg.get_binary() + if handle not in self.file_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + resp = self.file_table[handle].stat() + if issubclass(type(resp), SFTPAttributes): + self._response(request_number, CMD_ATTRS, resp) + else: + self._send_status(request_number, resp) + elif t == CMD_SETSTAT: + path = msg.get_text() + attr = SFTPAttributes._from_msg(msg) + self._send_status(request_number, self.server.chattr(path, attr)) + elif t == CMD_FSETSTAT: + handle = msg.get_binary() + attr = SFTPAttributes._from_msg(msg) + if handle not in self.file_table: + self._response( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + self._send_status( + request_number, self.file_table[handle].chattr(attr) + ) + elif t == CMD_READLINK: + path = msg.get_text() + resp = self.server.readlink(path) + if isinstance(resp, (bytes, str)): + self._response( + request_number, CMD_NAME, 1, resp, "", SFTPAttributes() + ) + else: + self._send_status(request_number, resp) + elif t == CMD_SYMLINK: + # the sftp 2 draft is incorrect here! 
+ # path always follows target_path + target_path = msg.get_text() + path = msg.get_text() + self._send_status( + request_number, self.server.symlink(target_path, path) + ) + elif t == CMD_REALPATH: + path = msg.get_text() + rpath = self.server.canonicalize(path) + self._response( + request_number, CMD_NAME, 1, rpath, "", SFTPAttributes() + ) + elif t == CMD_EXTENDED: + tag = msg.get_text() + if tag == "check-file": + self._check_file(request_number, msg) + elif tag == "posix-rename@openssh.com": + oldpath = msg.get_text() + newpath = msg.get_text() + self._send_status( + request_number, self.server.posix_rename(oldpath, newpath) + ) + else: + self._send_status(request_number, SFTP_OP_UNSUPPORTED) + else: + self._send_status(request_number, SFTP_OP_UNSUPPORTED) + + +from paramiko.sftp_handle import SFTPHandle diff --git a/.venv/lib/python3.9/site-packages/paramiko/sftp_si.py b/.venv/lib/python3.9/site-packages/paramiko/sftp_si.py new file mode 100644 index 0000000..72b5db9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/sftp_si.py @@ -0,0 +1,316 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +An interface to override for SFTP server support. +""" + +import os +import sys +from paramiko.sftp import SFTP_OP_UNSUPPORTED + + +class SFTPServerInterface: + """ + This class defines an interface for controlling the behavior of paramiko + when using the `.SFTPServer` subsystem to provide an SFTP server. + + Methods on this class are called from the SFTP session's thread, so you can + block as long as necessary without affecting other sessions (even other + SFTP sessions). However, raising an exception will usually cause the SFTP + session to abruptly end, so you will usually want to catch exceptions and + return an appropriate error code. + + All paths are in string form instead of unicode because not all SFTP + clients & servers obey the requirement that paths be encoded in UTF-8. + """ + + def __init__(self, server, *args, **kwargs): + """ + Create a new SFTPServerInterface object. This method does nothing by + default and is meant to be overridden by subclasses. + + :param .ServerInterface server: + the server object associated with this channel and SFTP subsystem + """ + super().__init__(*args, **kwargs) + + def session_started(self): + """ + The SFTP server session has just started. This method is meant to be + overridden to perform any necessary setup before handling callbacks + from SFTP operations. + """ + pass + + def session_ended(self): + """ + The SFTP server session has just ended, either cleanly or via an + exception. This method is meant to be overridden to perform any + necessary cleanup before this `.SFTPServerInterface` object is + destroyed. 
+ """ + pass + + def open(self, path, flags, attr): + """ + Open a file on the server and create a handle for future operations + on that file. On success, a new object subclassed from `.SFTPHandle` + should be returned. This handle will be used for future operations + on the file (read, write, etc). On failure, an error code such as + ``SFTP_PERMISSION_DENIED`` should be returned. + + ``flags`` contains the requested mode for opening (read-only, + write-append, etc) as a bitset of flags from the ``os`` module: + + - ``os.O_RDONLY`` + - ``os.O_WRONLY`` + - ``os.O_RDWR`` + - ``os.O_APPEND`` + - ``os.O_CREAT`` + - ``os.O_TRUNC`` + - ``os.O_EXCL`` + + (One of ``os.O_RDONLY``, ``os.O_WRONLY``, or ``os.O_RDWR`` will always + be set.) + + The ``attr`` object contains requested attributes of the file if it + has to be created. Some or all attribute fields may be missing if + the client didn't specify them. + + .. note:: The SFTP protocol defines all files to be in "binary" mode. + There is no equivalent to Python's "text" mode. + + :param str path: + the requested path (relative or absolute) of the file to be opened. + :param int flags: + flags or'd together from the ``os`` module indicating the requested + mode for opening the file. + :param .SFTPAttributes attr: + requested attributes of the file if it is newly created. + :return: a new `.SFTPHandle` or error code. + """ + return SFTP_OP_UNSUPPORTED + + def list_folder(self, path): + """ + Return a list of files within a given folder. The ``path`` will use + posix notation (``"/"`` separates folder names) and may be an absolute + or relative path. + + The list of files is expected to be a list of `.SFTPAttributes` + objects, which are similar in structure to the objects returned by + ``os.stat``. In addition, each object should have its ``filename`` + field filled in, since this is important to a directory listing and + not normally present in ``os.stat`` results. The method + `.SFTPAttributes.from_stat` will usually do what you want. + + In case of an error, you should return one of the ``SFTP_*`` error + codes, such as ``SFTP_PERMISSION_DENIED``. + + :param str path: the requested path (relative or absolute) to be + listed. + :return: + a list of the files in the given folder, using `.SFTPAttributes` + objects. + + .. note:: + You should normalize the given ``path`` first (see the `os.path` + module) and check appropriate permissions before returning the list + of files. Be careful of malicious clients attempting to use + relative paths to escape restricted folders, if you're doing a + direct translation from the SFTP server path to your local + filesystem. + """ + return SFTP_OP_UNSUPPORTED + + def stat(self, path): + """ + Return an `.SFTPAttributes` object for a path on the server, or an + error code. If your server supports symbolic links (also known as + "aliases"), you should follow them. (`lstat` is the corresponding + call that doesn't follow symlinks/aliases.) + + :param str path: + the requested path (relative or absolute) to fetch file statistics + for. + :return: + an `.SFTPAttributes` object for the given file, or an SFTP error + code (like ``SFTP_PERMISSION_DENIED``). + """ + return SFTP_OP_UNSUPPORTED + + def lstat(self, path): + """ + Return an `.SFTPAttributes` object for a path on the server, or an + error code. If your server supports symbolic links (also known as + "aliases"), you should not follow them -- instead, you should + return data on the symlink or alias itself. 
(`stat` is the + corresponding call that follows symlinks/aliases.) + + :param str path: + the requested path (relative or absolute) to fetch file statistics + for. + :type path: str + :return: + an `.SFTPAttributes` object for the given file, or an SFTP error + code (like ``SFTP_PERMISSION_DENIED``). + """ + return SFTP_OP_UNSUPPORTED + + def remove(self, path): + """ + Delete a file, if possible. + + :param str path: + the requested path (relative or absolute) of the file to delete. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def rename(self, oldpath, newpath): + """ + Rename (or move) a file. The SFTP specification implies that this + method can be used to move an existing file into a different folder, + and since there's no other (easy) way to move files via SFTP, it's + probably a good idea to implement "move" in this method too, even for + files that cross disk partition boundaries, if at all possible. + + .. note:: You should return an error if a file with the same name as + ``newpath`` already exists. (The rename operation should be + non-desctructive.) + + .. note:: + This method implements 'standard' SFTP ``RENAME`` behavior; those + seeking the OpenSSH "POSIX rename" extension behavior should use + `posix_rename`. + + :param str oldpath: + the requested path (relative or absolute) of the existing file. + :param str newpath: the requested new path of the file. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def posix_rename(self, oldpath, newpath): + """ + Rename (or move) a file, following posix conventions. If newpath + already exists, it will be overwritten. + + :param str oldpath: + the requested path (relative or absolute) of the existing file. + :param str newpath: the requested new path of the file. + :return: an SFTP error code `int` like ``SFTP_OK``. + + :versionadded: 2.2 + """ + return SFTP_OP_UNSUPPORTED + + def mkdir(self, path, attr): + """ + Create a new directory with the given attributes. The ``attr`` + object may be considered a "hint" and ignored. + + The ``attr`` object will contain only those fields provided by the + client in its request, so you should use ``hasattr`` to check for + the presence of fields before using them. In some cases, the ``attr`` + object may be completely empty. + + :param str path: + requested path (relative or absolute) of the new folder. + :param .SFTPAttributes attr: requested attributes of the new folder. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def rmdir(self, path): + """ + Remove a directory if it exists. The ``path`` should refer to an + existing, empty folder -- otherwise this method should return an + error. + + :param str path: + requested path (relative or absolute) of the folder to remove. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def chattr(self, path, attr): + """ + Change the attributes of a file. The ``attr`` object will contain + only those fields provided by the client in its request, so you + should check for the presence of fields before using them. + + :param str path: + requested path (relative or absolute) of the file to change. + :param attr: + requested attributes to change on the file (an `.SFTPAttributes` + object) + :return: an error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def canonicalize(self, path): + """ + Return the canonical form of a path on the server. 
For example, + if the server's home folder is ``/home/foo``, the path + ``"../betty"`` would be canonicalized to ``"/home/betty"``. Note + the obvious security issues: if you're serving files only from a + specific folder, you probably don't want this method to reveal path + names outside that folder. + + You may find the Python methods in ``os.path`` useful, especially + ``os.path.normpath`` and ``os.path.realpath``. + + The default implementation returns ``os.path.normpath('/' + path)``. + """ + if os.path.isabs(path): + out = os.path.normpath(path) + else: + out = os.path.normpath("/" + path) + if sys.platform == "win32": + # on windows, normalize backslashes to sftp/posix format + out = out.replace("\\", "/") + return out + + def readlink(self, path): + """ + Return the target of a symbolic link (or shortcut) on the server. + If the specified path doesn't refer to a symbolic link, an error + should be returned. + + :param str path: path (relative or absolute) of the symbolic link. + :return: + the target `str` path of the symbolic link, or an error code like + ``SFTP_NO_SUCH_FILE``. + """ + return SFTP_OP_UNSUPPORTED + + def symlink(self, target_path, path): + """ + Create a symbolic link on the server, as new pathname ``path``, + with ``target_path`` as the target of the link. + + :param str target_path: + path (relative or absolute) of the target for this new symbolic + link. + :param str path: + path (relative or absolute) of the symbolic link to create. + :return: an error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED diff --git a/.venv/lib/python3.9/site-packages/paramiko/ssh_exception.py b/.venv/lib/python3.9/site-packages/paramiko/ssh_exception.py new file mode 100644 index 0000000..2b68ebe --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/ssh_exception.py @@ -0,0 +1,250 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import socket + + +class SSHException(Exception): + """ + Exception raised by failures in SSH2 protocol negotiation or logic errors. + """ + + pass + + +class AuthenticationException(SSHException): + """ + Exception raised when authentication failed for some reason. It may be + possible to retry with different credentials. (Other classes specify more + specific reasons.) + + .. versionadded:: 1.6 + """ + + pass + + +class PasswordRequiredException(AuthenticationException): + """ + Exception raised when a password is needed to unlock a private key file. + """ + + pass + + +class BadAuthenticationType(AuthenticationException): + """ + Exception raised when an authentication type (like password) is used, but + the server isn't allowing that type. (It may only allow public-key, for + example.) + + .. 
versionadded:: 1.1 + """ + + allowed_types = [] + + # TODO 4.0: remove explanation kwarg + def __init__(self, explanation, types): + # TODO 4.0: remove this supercall unless it's actually required for + # pickling (after fixing pickling) + AuthenticationException.__init__(self, explanation, types) + self.explanation = explanation + self.allowed_types = types + + def __str__(self): + return "{}; allowed types: {!r}".format( + self.explanation, self.allowed_types + ) + + +class PartialAuthentication(AuthenticationException): + """ + An internal exception thrown in the case of partial authentication. + """ + + allowed_types = [] + + def __init__(self, types): + AuthenticationException.__init__(self, types) + self.allowed_types = types + + def __str__(self): + return "Partial authentication; allowed types: {!r}".format( + self.allowed_types + ) + + +# TODO 4.0: stop inheriting from SSHException, move to auth.py +class UnableToAuthenticate(AuthenticationException): + pass + + +class ChannelException(SSHException): + """ + Exception raised when an attempt to open a new `.Channel` fails. + + :param int code: the error code returned by the server + + .. versionadded:: 1.6 + """ + + def __init__(self, code, text): + SSHException.__init__(self, code, text) + self.code = code + self.text = text + + def __str__(self): + return "ChannelException({!r}, {!r})".format(self.code, self.text) + + +class BadHostKeyException(SSHException): + """ + The host key given by the SSH server did not match what we were expecting. + + :param str hostname: the hostname of the SSH server + :param PKey got_key: the host key presented by the server + :param PKey expected_key: the host key expected + + .. versionadded:: 1.6 + """ + + def __init__(self, hostname, got_key, expected_key): + SSHException.__init__(self, hostname, got_key, expected_key) + self.hostname = hostname + self.key = got_key + self.expected_key = expected_key + + def __str__(self): + msg = "Host key for server '{}' does not match: got '{}', expected '{}'" # noqa + return msg.format( + self.hostname, + self.key.get_base64(), + self.expected_key.get_base64(), + ) + + +class IncompatiblePeer(SSHException): + """ + A disagreement arose regarding an algorithm required for key exchange. + + .. versionadded:: 2.9 + """ + + # TODO 4.0: consider making this annotate w/ 1..N 'missing' algorithms, + # either just the first one that would halt kex, or even updating the + # Transport logic so we record /all/ that /could/ halt kex. + # TODO: update docstrings where this may end up raised so they are more + # specific. + pass + + +class ProxyCommandFailure(SSHException): + """ + The "ProxyCommand" found in the .ssh/config file returned an error. + + :param str command: The command line that is generating this exception. + :param str error: The error captured from the proxy command output. + """ + + def __init__(self, command, error): + SSHException.__init__(self, command, error) + self.command = command + self.error = error + + def __str__(self): + return 'ProxyCommand("{}") returned nonzero exit status: {}'.format( + self.command, self.error + ) + + +class NoValidConnectionsError(socket.error): + """ + Multiple connection attempts were made and no families succeeded. + + This exception class wraps multiple "real" underlying connection errors, + all of which represent failed connection attempts. Because these errors are + not guaranteed to all be of the same error type (i.e. 
different errno, + `socket.error` subclass, message, etc) we expose a single unified error + message and a ``None`` errno so that instances of this class match most + normal handling of `socket.error` objects. + + To see the wrapped exception objects, access the ``errors`` attribute. + ``errors`` is a dict whose keys are address tuples (e.g. ``('127.0.0.1', + 22)``) and whose values are the exception encountered trying to connect to + that address. + + It is implied/assumed that all the errors given to a single instance of + this class are from connecting to the same hostname + port (and thus that + the differences are in the resolution of the hostname - e.g. IPv4 vs v6). + + .. versionadded:: 1.16 + """ + + def __init__(self, errors): + """ + :param dict errors: + The errors dict to store, as described by class docstring. + """ + addrs = sorted(errors.keys()) + body = ", ".join([x[0] for x in addrs[:-1]]) + tail = addrs[-1][0] + if body: + msg = "Unable to connect to port {0} on {1} or {2}" + else: + msg = "Unable to connect to port {0} on {2}" + super().__init__( + None, msg.format(addrs[0][1], body, tail) # stand-in for errno + ) + self.errors = errors + + def __reduce__(self): + return (self.__class__, (self.errors,)) + + +class CouldNotCanonicalize(SSHException): + """ + Raised when hostname canonicalization fails & fallback is disabled. + + .. versionadded:: 2.7 + """ + + pass + + +class ConfigParseError(SSHException): + """ + A fatal error was encountered trying to parse SSH config data. + + Typically this means a config file violated the ``ssh_config`` + specification in a manner that requires exiting immediately, such as not + matching ``key = value`` syntax or misusing certain ``Match`` keywords. + + .. versionadded:: 2.7 + """ + + pass + + +class MessageOrderError(SSHException): + """ + Out-of-order protocol messages were received, violating "strict kex" mode. + + .. versionadded:: 3.4 + """ + + pass diff --git a/.venv/lib/python3.9/site-packages/paramiko/ssh_gss.py b/.venv/lib/python3.9/site-packages/paramiko/ssh_gss.py new file mode 100644 index 0000000..30a2541 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/ssh_gss.py @@ -0,0 +1,772 @@ +# Copyright (C) 2013-2014 science + computing ag +# Author: Sebastian Deiss +# +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +""" +This module provides GSS-API / SSPI authentication as defined in :rfc:`4462`. + +.. note:: Credential delegation is not supported in server mode. + +.. seealso:: :doc:`/api/kex_gss` + +.. versionadded:: 1.15 +""" + +import struct +import os +import sys + + +#: A boolean constraint that indicates if GSS-API / SSPI is available. +GSS_AUTH_AVAILABLE = True + + +#: A tuple of the exception types used by the underlying GSSAPI implementation. 
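The NoValidConnectionsError class above exposes its per-address failures through an ``errors`` dict keyed by ``(host, port)`` tuples. A minimal, hedged sketch of how calling code might inspect that dict after a failed connect follows; the hostname, port and username are placeholder values, not anything taken from this diff:

import paramiko
from paramiko.ssh_exception import NoValidConnectionsError

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
    # "example.invalid", the port and the username are illustrative only.
    client.connect("example.invalid", port=22, username="user", timeout=5)
except NoValidConnectionsError as exc:
    # exc.errors maps address tuples (e.g. ('127.0.0.1', 22)) to the
    # underlying exception raised while trying that address.
    for addr, err in exc.errors.items():
        print("connection to {} failed: {!r}".format(addr, err))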
+GSS_EXCEPTIONS = () + + +#: :var str _API: Constraint for the used API +_API = None + +try: + import gssapi + + if hasattr(gssapi, "__title__") and gssapi.__title__ == "python-gssapi": + # old, unmaintained python-gssapi package + _API = "MIT" # keep this for compatibility + GSS_EXCEPTIONS = (gssapi.GSSException,) + else: + _API = "PYTHON-GSSAPI-NEW" + GSS_EXCEPTIONS = ( + gssapi.exceptions.GeneralError, + gssapi.raw.misc.GSSError, + ) +except (ImportError, OSError): + try: + import pywintypes + import sspicon + import sspi + + _API = "SSPI" + GSS_EXCEPTIONS = (pywintypes.error,) + except ImportError: + GSS_AUTH_AVAILABLE = False + _API = None + +from paramiko.common import MSG_USERAUTH_REQUEST +from paramiko.ssh_exception import SSHException + + +def GSSAuth(auth_method, gss_deleg_creds=True): + """ + Provide SSH2 GSS-API / SSPI authentication. + + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not. + We delegate credentials by default. + :return: Either an `._SSH_GSSAPI_OLD` or `._SSH_GSSAPI_NEW` (Unix) + object or an `_SSH_SSPI` (Windows) object + :rtype: object + + :raises: ``ImportError`` -- If no GSS-API / SSPI module could be imported. + + :see: `RFC 4462 `_ + :note: Check for the available API and return either an `._SSH_GSSAPI_OLD` + (MIT GSSAPI using python-gssapi package) object, an + `._SSH_GSSAPI_NEW` (MIT GSSAPI using gssapi package) object + or an `._SSH_SSPI` (MS SSPI) object. + If there is no supported API available, + ``None`` will be returned. + """ + if _API == "MIT": + return _SSH_GSSAPI_OLD(auth_method, gss_deleg_creds) + elif _API == "PYTHON-GSSAPI-NEW": + return _SSH_GSSAPI_NEW(auth_method, gss_deleg_creds) + elif _API == "SSPI" and os.name == "nt": + return _SSH_SSPI(auth_method, gss_deleg_creds) + else: + raise ImportError("Unable to import a GSS-API / SSPI module!") + + +class _SSH_GSSAuth: + """ + Contains the shared variables and methods of `._SSH_GSSAPI_OLD`, + `._SSH_GSSAPI_NEW` and `._SSH_SSPI`. + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + self._auth_method = auth_method + self._gss_deleg_creds = gss_deleg_creds + self._gss_host = None + self._username = None + self._session_id = None + self._service = "ssh-connection" + """ + OpenSSH supports Kerberos V5 mechanism only for GSS-API authentication, + so we also support the krb5 mechanism only. + """ + self._krb5_mech = "1.2.840.113554.1.2.2" + + # client mode + self._gss_ctxt = None + self._gss_ctxt_status = False + + # server mode + self._gss_srv_ctxt = None + self._gss_srv_ctxt_status = False + self.cc_file = None + + def set_service(self, service): + """ + This is just a setter to use a non default service. + I added this method, because RFC 4462 doesn't specify "ssh-connection" + as the only service value. + + :param str service: The desired SSH service + """ + if service.find("ssh-"): + self._service = service + + def set_username(self, username): + """ + Setter for C{username}. If GSS-API Key Exchange is performed, the + username is not set by C{ssh_init_sec_context}. 
+ + :param str username: The name of the user who attempts to login + """ + self._username = username + + def ssh_gss_oids(self, mode="client"): + """ + This method returns a single OID, because we only support the + Kerberos V5 mechanism. + + :param str mode: Client for client mode and server for server mode + :return: A byte sequence containing the number of supported + OIDs, the length of the OID and the actual OID encoded with + DER + :note: In server mode we just return the OID length and the DER encoded + OID. + """ + from pyasn1.type.univ import ObjectIdentifier + from pyasn1.codec.der import encoder + + OIDs = self._make_uint32(1) + krb5_OID = encoder.encode(ObjectIdentifier(self._krb5_mech)) + OID_len = self._make_uint32(len(krb5_OID)) + if mode == "server": + return OID_len + krb5_OID + return OIDs + OID_len + krb5_OID + + def ssh_check_mech(self, desired_mech): + """ + Check if the given OID is the Kerberos V5 OID (server mode). + + :param str desired_mech: The desired GSS-API mechanism of the client + :return: ``True`` if the given OID is supported, otherwise C{False} + """ + from pyasn1.codec.der import decoder + + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + return False + return True + + # Internals + # ------------------------------------------------------------------------- + def _make_uint32(self, integer): + """ + Create a 32 bit unsigned integer (The byte sequence of an integer). + + :param int integer: The integer value to convert + :return: The byte sequence of an 32 bit integer + """ + return struct.pack("!I", integer) + + def _ssh_build_mic(self, session_id, username, service, auth_method): + """ + Create the SSH2 MIC filed for gssapi-with-mic. + + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :param str service: The requested SSH service + :param str auth_method: The requested SSH authentication mechanism + :return: The MIC as defined in RFC 4462. The contents of the + MIC field are: + string session_identifier, + byte SSH_MSG_USERAUTH_REQUEST, + string user-name, + string service (ssh-connection), + string authentication-method + (gssapi-with-mic or gssapi-keyex) + """ + mic = self._make_uint32(len(session_id)) + mic += session_id + mic += struct.pack("B", MSG_USERAUTH_REQUEST) + mic += self._make_uint32(len(username)) + mic += username.encode() + mic += self._make_uint32(len(service)) + mic += service.encode() + mic += self._make_uint32(len(auth_method)) + mic += auth_method.encode() + return mic + + +class _SSH_GSSAPI_OLD(_SSH_GSSAuth): + """ + Implementation of the GSS-API MIT Kerberos Authentication for SSH2, + using the older (unmaintained) python-gssapi package. + + :see: `.GSSAuth` + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) + + if self._gss_deleg_creds: + self._gss_flags = ( + gssapi.C_PROT_READY_FLAG, + gssapi.C_INTEG_FLAG, + gssapi.C_MUTUAL_FLAG, + gssapi.C_DELEG_FLAG, + ) + else: + self._gss_flags = ( + gssapi.C_PROT_READY_FLAG, + gssapi.C_INTEG_FLAG, + gssapi.C_MUTUAL_FLAG, + ) + + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): + """ + Initialize a GSS-API context. 
+ + :param str username: The name of the user who attempts to login + :param str target: The hostname of the target to connect to + :param str desired_mech: The negotiated GSS-API mechanism + ("pseudo negotiated" mechanism, because we + support just the krb5 mechanism :-)) + :param str recv_token: The GSS-API token received from the Server + :raises: + `.SSHException` -- Is raised if the desired mechanism of the client + is not supported + :return: A ``String`` if the GSS-API has returned a token or + ``None`` if no token was returned + """ + from pyasn1.codec.der import decoder + + self._username = username + self._gss_host = target + targ_name = gssapi.Name( + "host@" + self._gss_host, gssapi.C_NT_HOSTBASED_SERVICE + ) + ctx = gssapi.Context() + ctx.flags = self._gss_flags + if desired_mech is None: + krb5_mech = gssapi.OID.mech_from_string(self._krb5_mech) + else: + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + raise SSHException("Unsupported mechanism OID.") + else: + krb5_mech = gssapi.OID.mech_from_string(self._krb5_mech) + token = None + try: + if recv_token is None: + self._gss_ctxt = gssapi.InitContext( + peer_name=targ_name, + mech_type=krb5_mech, + req_flags=ctx.flags, + ) + token = self._gss_ctxt.step(token) + else: + token = self._gss_ctxt.step(recv_token) + except gssapi.GSSException: + message = "{} Target: {}".format(sys.exc_info()[1], self._gss_host) + raise gssapi.GSSException(message) + self._gss_ctxt_status = self._gss_ctxt.established + return token + + def ssh_get_mic(self, session_id, gss_kex=False): + """ + Create the MIC token for a SSH2 message. + + :param str session_id: The SSH session ID + :param bool gss_kex: Generate the MIC for GSS-API Key Exchange or not + :return: gssapi-with-mic: + Returns the MIC token from GSS-API for the message we created + with ``_ssh_build_mic``. + gssapi-keyex: + Returns the MIC token from GSS-API with the SSH session ID as + message. + """ + self._session_id = session_id + if not gss_kex: + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + mic_token = self._gss_ctxt.get_mic(mic_field) + else: + # for key exchange with gssapi-keyex + mic_token = self._gss_srv_ctxt.get_mic(self._session_id) + return mic_token + + def ssh_accept_sec_context(self, hostname, recv_token, username=None): + """ + Accept a GSS-API context (server mode). + + :param str hostname: The servers hostname + :param str username: The name of the user who attempts to login + :param str recv_token: The GSS-API Token received from the server, + if it's not the initial call. + :return: A ``String`` if the GSS-API has returned a token or ``None`` + if no token was returned + """ + # hostname and username are not required for GSSAPI, but for SSPI + self._gss_host = hostname + self._username = username + if self._gss_srv_ctxt is None: + self._gss_srv_ctxt = gssapi.AcceptContext() + token = self._gss_srv_ctxt.step(recv_token) + self._gss_srv_ctxt_status = self._gss_srv_ctxt.established + return token + + def ssh_check_mic(self, mic_token, session_id, username=None): + """ + Verify the MIC token for a SSH2 message. 
+ + :param str mic_token: The MIC token received from the client + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :return: None if the MIC check was successful + :raises: ``gssapi.GSSException`` -- if the MIC check failed + """ + self._session_id = session_id + self._username = username + if self._username is not None: + # server mode + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + self._gss_srv_ctxt.verify_mic(mic_field, mic_token) + else: + # for key exchange with gssapi-keyex + # client mode + self._gss_ctxt.verify_mic(self._session_id, mic_token) + + @property + def credentials_delegated(self): + """ + Checks if credentials are delegated (server mode). + + :return: ``True`` if credentials are delegated, otherwise ``False`` + """ + if self._gss_srv_ctxt.delegated_cred is not None: + return True + return False + + def save_client_creds(self, client_token): + """ + Save the Client token in a file. This is used by the SSH server + to store the client credentials if credentials are delegated + (server mode). + + :param str client_token: The GSS-API token received form the client + :raises: + ``NotImplementedError`` -- Credential delegation is currently not + supported in server mode + """ + raise NotImplementedError + + +class _SSH_GSSAPI_NEW(_SSH_GSSAuth): + """ + Implementation of the GSS-API MIT Kerberos Authentication for SSH2, + using the newer, currently maintained gssapi package. + + :see: `.GSSAuth` + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) + + if self._gss_deleg_creds: + self._gss_flags = ( + gssapi.RequirementFlag.protection_ready, + gssapi.RequirementFlag.integrity, + gssapi.RequirementFlag.mutual_authentication, + gssapi.RequirementFlag.delegate_to_peer, + ) + else: + self._gss_flags = ( + gssapi.RequirementFlag.protection_ready, + gssapi.RequirementFlag.integrity, + gssapi.RequirementFlag.mutual_authentication, + ) + + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): + """ + Initialize a GSS-API context. 
+ + :param str username: The name of the user who attempts to login + :param str target: The hostname of the target to connect to + :param str desired_mech: The negotiated GSS-API mechanism + ("pseudo negotiated" mechanism, because we + support just the krb5 mechanism :-)) + :param str recv_token: The GSS-API token received from the Server + :raises: `.SSHException` -- Is raised if the desired mechanism of the + client is not supported + :raises: ``gssapi.exceptions.GSSError`` if there is an error signaled + by the GSS-API implementation + :return: A ``String`` if the GSS-API has returned a token or ``None`` + if no token was returned + """ + from pyasn1.codec.der import decoder + + self._username = username + self._gss_host = target + targ_name = gssapi.Name( + "host@" + self._gss_host, + name_type=gssapi.NameType.hostbased_service, + ) + if desired_mech is not None: + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + raise SSHException("Unsupported mechanism OID.") + krb5_mech = gssapi.MechType.kerberos + token = None + if recv_token is None: + self._gss_ctxt = gssapi.SecurityContext( + name=targ_name, + flags=self._gss_flags, + mech=krb5_mech, + usage="initiate", + ) + token = self._gss_ctxt.step(token) + else: + token = self._gss_ctxt.step(recv_token) + self._gss_ctxt_status = self._gss_ctxt.complete + return token + + def ssh_get_mic(self, session_id, gss_kex=False): + """ + Create the MIC token for a SSH2 message. + + :param str session_id: The SSH session ID + :param bool gss_kex: Generate the MIC for GSS-API Key Exchange or not + :return: gssapi-with-mic: + Returns the MIC token from GSS-API for the message we created + with ``_ssh_build_mic``. + gssapi-keyex: + Returns the MIC token from GSS-API with the SSH session ID as + message. + :rtype: str + """ + self._session_id = session_id + if not gss_kex: + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + mic_token = self._gss_ctxt.get_signature(mic_field) + else: + # for key exchange with gssapi-keyex + mic_token = self._gss_srv_ctxt.get_signature(self._session_id) + return mic_token + + def ssh_accept_sec_context(self, hostname, recv_token, username=None): + """ + Accept a GSS-API context (server mode). + + :param str hostname: The servers hostname + :param str username: The name of the user who attempts to login + :param str recv_token: The GSS-API Token received from the server, + if it's not the initial call. + :return: A ``String`` if the GSS-API has returned a token or ``None`` + if no token was returned + """ + # hostname and username are not required for GSSAPI, but for SSPI + self._gss_host = hostname + self._username = username + if self._gss_srv_ctxt is None: + self._gss_srv_ctxt = gssapi.SecurityContext(usage="accept") + token = self._gss_srv_ctxt.step(recv_token) + self._gss_srv_ctxt_status = self._gss_srv_ctxt.complete + return token + + def ssh_check_mic(self, mic_token, session_id, username=None): + """ + Verify the MIC token for a SSH2 message. 
+ + :param str mic_token: The MIC token received from the client + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :return: None if the MIC check was successful + :raises: ``gssapi.exceptions.GSSError`` -- if the MIC check failed + """ + self._session_id = session_id + self._username = username + if self._username is not None: + # server mode + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + self._gss_srv_ctxt.verify_signature(mic_field, mic_token) + else: + # for key exchange with gssapi-keyex + # client mode + self._gss_ctxt.verify_signature(self._session_id, mic_token) + + @property + def credentials_delegated(self): + """ + Checks if credentials are delegated (server mode). + + :return: ``True`` if credentials are delegated, otherwise ``False`` + :rtype: bool + """ + if self._gss_srv_ctxt.delegated_creds is not None: + return True + return False + + def save_client_creds(self, client_token): + """ + Save the Client token in a file. This is used by the SSH server + to store the client credentials if credentials are delegated + (server mode). + + :param str client_token: The GSS-API token received form the client + :raises: ``NotImplementedError`` -- Credential delegation is currently + not supported in server mode + """ + raise NotImplementedError + + +class _SSH_SSPI(_SSH_GSSAuth): + """ + Implementation of the Microsoft SSPI Kerberos Authentication for SSH2. + + :see: `.GSSAuth` + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) + + if self._gss_deleg_creds: + self._gss_flags = ( + sspicon.ISC_REQ_INTEGRITY + | sspicon.ISC_REQ_MUTUAL_AUTH + | sspicon.ISC_REQ_DELEGATE + ) + else: + self._gss_flags = ( + sspicon.ISC_REQ_INTEGRITY | sspicon.ISC_REQ_MUTUAL_AUTH + ) + + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): + """ + Initialize a SSPI context. + + :param str username: The name of the user who attempts to login + :param str target: The FQDN of the target to connect to + :param str desired_mech: The negotiated SSPI mechanism + ("pseudo negotiated" mechanism, because we + support just the krb5 mechanism :-)) + :param recv_token: The SSPI token received from the Server + :raises: + `.SSHException` -- Is raised if the desired mechanism of the client + is not supported + :return: A ``String`` if the SSPI has returned a token or ``None`` if + no token was returned + """ + from pyasn1.codec.der import decoder + + self._username = username + self._gss_host = target + error = 0 + targ_name = "host/" + self._gss_host + if desired_mech is not None: + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + raise SSHException("Unsupported mechanism OID.") + try: + if recv_token is None: + self._gss_ctxt = sspi.ClientAuth( + "Kerberos", scflags=self._gss_flags, targetspn=targ_name + ) + error, token = self._gss_ctxt.authorize(recv_token) + token = token[0].Buffer + except pywintypes.error as e: + e.strerror += ", Target: {}".format(self._gss_host) + raise + + if error == 0: + """ + if the status is GSS_COMPLETE (error = 0) the context is fully + established an we can set _gss_ctxt_status to True. 
+ """ + self._gss_ctxt_status = True + token = None + """ + You won't get another token if the context is fully established, + so i set token to None instead of "" + """ + return token + + def ssh_get_mic(self, session_id, gss_kex=False): + """ + Create the MIC token for a SSH2 message. + + :param str session_id: The SSH session ID + :param bool gss_kex: Generate the MIC for Key Exchange with SSPI or not + :return: gssapi-with-mic: + Returns the MIC token from SSPI for the message we created + with ``_ssh_build_mic``. + gssapi-keyex: + Returns the MIC token from SSPI with the SSH session ID as + message. + """ + self._session_id = session_id + if not gss_kex: + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + mic_token = self._gss_ctxt.sign(mic_field) + else: + # for key exchange with gssapi-keyex + mic_token = self._gss_srv_ctxt.sign(self._session_id) + return mic_token + + def ssh_accept_sec_context(self, hostname, username, recv_token): + """ + Accept a SSPI context (server mode). + + :param str hostname: The servers FQDN + :param str username: The name of the user who attempts to login + :param str recv_token: The SSPI Token received from the server, + if it's not the initial call. + :return: A ``String`` if the SSPI has returned a token or ``None`` if + no token was returned + """ + self._gss_host = hostname + self._username = username + targ_name = "host/" + self._gss_host + self._gss_srv_ctxt = sspi.ServerAuth("Kerberos", spn=targ_name) + error, token = self._gss_srv_ctxt.authorize(recv_token) + token = token[0].Buffer + if error == 0: + self._gss_srv_ctxt_status = True + token = None + return token + + def ssh_check_mic(self, mic_token, session_id, username=None): + """ + Verify the MIC token for a SSH2 message. + + :param str mic_token: The MIC token received from the client + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :return: None if the MIC check was successful + :raises: ``sspi.error`` -- if the MIC check failed + """ + self._session_id = session_id + self._username = username + if username is not None: + # server mode + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + # Verifies data and its signature. If verification fails, an + # sspi.error will be raised. + self._gss_srv_ctxt.verify(mic_field, mic_token) + else: + # for key exchange with gssapi-keyex + # client mode + # Verifies data and its signature. If verification fails, an + # sspi.error will be raised. + self._gss_ctxt.verify(self._session_id, mic_token) + + @property + def credentials_delegated(self): + """ + Checks if credentials are delegated (server mode). + + :return: ``True`` if credentials are delegated, otherwise ``False`` + """ + return self._gss_flags & sspicon.ISC_REQ_DELEGATE and ( + self._gss_srv_ctxt_status or self._gss_flags + ) + + def save_client_creds(self, client_token): + """ + Save the Client token in a file. This is used by the SSH server + to store the client credentials if credentials are delegated + (server mode). 
+ + :param str client_token: The SSPI token received form the client + :raises: + ``NotImplementedError`` -- Credential delegation is currently not + supported in server mode + """ + raise NotImplementedError diff --git a/.venv/lib/python3.9/site-packages/paramiko/transport.py b/.venv/lib/python3.9/site-packages/paramiko/transport.py new file mode 100644 index 0000000..472ec6c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/transport.py @@ -0,0 +1,3456 @@ +# Copyright (C) 2003-2007 Robey Pointer +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Core protocol implementation +""" + +import os +import socket +import sys +import threading +import time +import weakref +from hashlib import md5, sha1, sha256, sha512 + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import ( + algorithms, + Cipher, + modes, + aead, +) + +import paramiko +from paramiko import util +from paramiko.auth_handler import AuthHandler, AuthOnlyHandler +from paramiko.ssh_gss import GSSAuth +from paramiko.channel import Channel +from paramiko.common import ( + xffffffff, + cMSG_CHANNEL_OPEN, + cMSG_IGNORE, + cMSG_GLOBAL_REQUEST, + DEBUG, + MSG_KEXINIT, + MSG_IGNORE, + MSG_DISCONNECT, + MSG_DEBUG, + ERROR, + WARNING, + cMSG_UNIMPLEMENTED, + INFO, + cMSG_KEXINIT, + cMSG_NEWKEYS, + MSG_NEWKEYS, + cMSG_REQUEST_SUCCESS, + cMSG_REQUEST_FAILURE, + CONNECTION_FAILED_CODE, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_SUCCEEDED, + cMSG_CHANNEL_OPEN_FAILURE, + cMSG_CHANNEL_OPEN_SUCCESS, + MSG_GLOBAL_REQUEST, + MSG_REQUEST_SUCCESS, + MSG_REQUEST_FAILURE, + cMSG_SERVICE_REQUEST, + MSG_SERVICE_ACCEPT, + MSG_CHANNEL_OPEN_SUCCESS, + MSG_CHANNEL_OPEN_FAILURE, + MSG_CHANNEL_OPEN, + MSG_CHANNEL_SUCCESS, + MSG_CHANNEL_FAILURE, + MSG_CHANNEL_DATA, + MSG_CHANNEL_EXTENDED_DATA, + MSG_CHANNEL_WINDOW_ADJUST, + MSG_CHANNEL_REQUEST, + MSG_CHANNEL_EOF, + MSG_CHANNEL_CLOSE, + MIN_WINDOW_SIZE, + MIN_PACKET_SIZE, + MAX_WINDOW_SIZE, + DEFAULT_WINDOW_SIZE, + DEFAULT_MAX_PACKET_SIZE, + HIGHEST_USERAUTH_MESSAGE_ID, + MSG_UNIMPLEMENTED, + MSG_NAMES, + MSG_EXT_INFO, + cMSG_EXT_INFO, + byte_ord, +) +from paramiko.compress import ZlibCompressor, ZlibDecompressor +from paramiko.ed25519key import Ed25519Key +from paramiko.kex_curve25519 import KexCurve25519 +from paramiko.kex_gex import KexGex, KexGexSHA256 +from paramiko.kex_group1 import KexGroup1 +from paramiko.kex_group14 import KexGroup14, KexGroup14SHA256 +from paramiko.kex_group16 import KexGroup16SHA512 +from paramiko.kex_ecdh_nist import KexNistp256, KexNistp384, KexNistp521 +from paramiko.kex_gss import KexGSSGex, KexGSSGroup1, KexGSSGroup14 +from paramiko.message import Message +from paramiko.packet import Packetizer, NeedRekeyException +from paramiko.primes import ModulusPack 
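The ssh_gss.py module above probes for a usable GSS-API / SSPI backend at import time, recording the outcome in ``GSS_AUTH_AVAILABLE``, and ``GSSAuth()`` returns the matching helper class. A hedged sketch of feature-detecting that support before requesting gssapi-with-mic authentication; the names come from the module above, while the username and the fallback branch are assumptions about calling code rather than part of paramiko:

from paramiko.ssh_gss import GSS_AUTH_AVAILABLE, GSSAuth

if GSS_AUTH_AVAILABLE:
    # Returns _SSH_GSSAPI_OLD, _SSH_GSSAPI_NEW or _SSH_SSPI depending on
    # which backend module could be imported.
    gss = GSSAuth("gssapi-with-mic", gss_deleg_creds=True)
    gss.set_username("user")  # placeholder username
else:
    gss = None  # fall back to password or public-key authentication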
+from paramiko.rsakey import RSAKey +from paramiko.ecdsakey import ECDSAKey +from paramiko.server import ServerInterface +from paramiko.sftp_client import SFTPClient +from paramiko.ssh_exception import ( + BadAuthenticationType, + ChannelException, + IncompatiblePeer, + MessageOrderError, + ProxyCommandFailure, + SSHException, +) +from paramiko.util import ( + ClosingContextManager, + clamp_value, + b, +) + + +# TripleDES is moving from `cryptography.hazmat.primitives.ciphers.algorithms` +# in cryptography>=43.0.0 to `cryptography.hazmat.decrepit.ciphers.algorithms` +# It will be removed from `cryptography.hazmat.primitives.ciphers.algorithms` +# in cryptography==48.0.0. +# +# Source References: +# - https://github.com/pyca/cryptography/commit/722a6393e61b3ac +# - https://github.com/pyca/cryptography/pull/11407/files +try: + from cryptography.hazmat.decrepit.ciphers.algorithms import TripleDES +except ImportError: + from cryptography.hazmat.primitives.ciphers.algorithms import TripleDES + + +# for thread cleanup +_active_threads = [] + + +def _join_lingering_threads(): + for thr in _active_threads: + thr.stop_thread() + + +import atexit + +atexit.register(_join_lingering_threads) + + +class Transport(threading.Thread, ClosingContextManager): + """ + An SSH Transport attaches to a stream (usually a socket), negotiates an + encrypted session, authenticates, and then creates stream tunnels, called + `channels <.Channel>`, across the session. Multiple channels can be + multiplexed across a single session (and often are, in the case of port + forwardings). + + Instances of this class may be used as context managers. + """ + + _ENCRYPT = object() + _DECRYPT = object() + + _PROTO_ID = "2.0" + _CLIENT_ID = "paramiko_{}".format(paramiko.__version__) + + # These tuples of algorithm identifiers are in preference order; do not + # reorder without reason! + # NOTE: if you need to modify these, we suggest leveraging the + # `disabled_algorithms` constructor argument (also available in SSHClient) + # instead of monkeypatching or subclassing. 
+ _preferred_ciphers = ( + "aes128-ctr", + "aes192-ctr", + "aes256-ctr", + "aes128-cbc", + "aes192-cbc", + "aes256-cbc", + "3des-cbc", + "aes128-gcm@openssh.com", + "aes256-gcm@openssh.com", + ) + _preferred_macs = ( + "hmac-sha2-256", + "hmac-sha2-512", + "hmac-sha2-256-etm@openssh.com", + "hmac-sha2-512-etm@openssh.com", + "hmac-sha1", + "hmac-md5", + "hmac-sha1-96", + "hmac-md5-96", + ) + # ~= HostKeyAlgorithms in OpenSSH land + _preferred_keys = ( + "ssh-ed25519", + "ecdsa-sha2-nistp256", + "ecdsa-sha2-nistp384", + "ecdsa-sha2-nistp521", + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + ) + # ~= PubKeyAcceptedAlgorithms + _preferred_pubkeys = ( + "ssh-ed25519", + "ecdsa-sha2-nistp256", + "ecdsa-sha2-nistp384", + "ecdsa-sha2-nistp521", + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + ) + _preferred_kex = ( + "ecdh-sha2-nistp256", + "ecdh-sha2-nistp384", + "ecdh-sha2-nistp521", + "diffie-hellman-group16-sha512", + "diffie-hellman-group-exchange-sha256", + "diffie-hellman-group14-sha256", + "diffie-hellman-group-exchange-sha1", + "diffie-hellman-group14-sha1", + "diffie-hellman-group1-sha1", + ) + if KexCurve25519.is_available(): + _preferred_kex = ("curve25519-sha256@libssh.org",) + _preferred_kex + _preferred_gsskex = ( + "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==", + "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==", + "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==", + ) + _preferred_compression = ("none",) + + _cipher_info = { + "aes128-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 16, + }, + "aes192-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 24, + }, + "aes256-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 32, + }, + "aes128-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 16, + }, + "aes192-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 24, + }, + "aes256-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 32, + }, + "3des-cbc": { + "class": TripleDES, + "mode": modes.CBC, + "block-size": 8, + "key-size": 24, + }, + "aes128-gcm@openssh.com": { + "class": aead.AESGCM, + "block-size": 16, + "iv-size": 12, + "key-size": 16, + "is_aead": True, + }, + "aes256-gcm@openssh.com": { + "class": aead.AESGCM, + "block-size": 16, + "iv-size": 12, + "key-size": 32, + "is_aead": True, + }, + } + + _mac_info = { + "hmac-sha1": {"class": sha1, "size": 20}, + "hmac-sha1-96": {"class": sha1, "size": 12}, + "hmac-sha2-256": {"class": sha256, "size": 32}, + "hmac-sha2-256-etm@openssh.com": {"class": sha256, "size": 32}, + "hmac-sha2-512": {"class": sha512, "size": 64}, + "hmac-sha2-512-etm@openssh.com": {"class": sha512, "size": 64}, + "hmac-md5": {"class": md5, "size": 16}, + "hmac-md5-96": {"class": md5, "size": 12}, + } + + _key_info = { + # TODO: at some point we will want to drop this as it's no longer + # considered secure due to using SHA-1 for signatures. OpenSSH 8.8 no + # longer supports it. Question becomes at what point do we want to + # prevent users with older setups from using this? 
+ "ssh-rsa": RSAKey, + "ssh-rsa-cert-v01@openssh.com": RSAKey, + "rsa-sha2-256": RSAKey, + "rsa-sha2-256-cert-v01@openssh.com": RSAKey, + "rsa-sha2-512": RSAKey, + "rsa-sha2-512-cert-v01@openssh.com": RSAKey, + "ecdsa-sha2-nistp256": ECDSAKey, + "ecdsa-sha2-nistp256-cert-v01@openssh.com": ECDSAKey, + "ecdsa-sha2-nistp384": ECDSAKey, + "ecdsa-sha2-nistp384-cert-v01@openssh.com": ECDSAKey, + "ecdsa-sha2-nistp521": ECDSAKey, + "ecdsa-sha2-nistp521-cert-v01@openssh.com": ECDSAKey, + "ssh-ed25519": Ed25519Key, + "ssh-ed25519-cert-v01@openssh.com": Ed25519Key, + } + + _kex_info = { + "diffie-hellman-group1-sha1": KexGroup1, + "diffie-hellman-group14-sha1": KexGroup14, + "diffie-hellman-group-exchange-sha1": KexGex, + "diffie-hellman-group-exchange-sha256": KexGexSHA256, + "diffie-hellman-group14-sha256": KexGroup14SHA256, + "diffie-hellman-group16-sha512": KexGroup16SHA512, + "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGroup1, + "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGroup14, + "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGex, + "ecdh-sha2-nistp256": KexNistp256, + "ecdh-sha2-nistp384": KexNistp384, + "ecdh-sha2-nistp521": KexNistp521, + } + if KexCurve25519.is_available(): + _kex_info["curve25519-sha256@libssh.org"] = KexCurve25519 + + _compression_info = { + # zlib@openssh.com is just zlib, but only turned on after a successful + # authentication. openssh servers may only offer this type because + # they've had troubles with security holes in zlib in the past. + "zlib@openssh.com": (ZlibCompressor, ZlibDecompressor), + "zlib": (ZlibCompressor, ZlibDecompressor), + "none": (None, None), + } + + _modulus_pack = None + _active_check_timeout = 0.1 + + def __init__( + self, + sock, + default_window_size=DEFAULT_WINDOW_SIZE, + default_max_packet_size=DEFAULT_MAX_PACKET_SIZE, + gss_kex=False, + gss_deleg_creds=True, + disabled_algorithms=None, + server_sig_algs=True, + strict_kex=True, + packetizer_class=None, + ): + """ + Create a new SSH session over an existing socket, or socket-like + object. This only creates the `.Transport` object; it doesn't begin + the SSH session yet. Use `connect` or `start_client` to begin a client + session, or `start_server` to begin a server session. + + If the object is not actually a socket, it must have the following + methods: + + - ``send(bytes)``: Writes from 1 to ``len(bytes)`` bytes, and returns + an int representing the number of bytes written. Returns + 0 or raises ``EOFError`` if the stream has been closed. + - ``recv(int)``: Reads from 1 to ``int`` bytes and returns them as a + string. Returns 0 or raises ``EOFError`` if the stream has been + closed. + - ``close()``: Closes the socket. + - ``settimeout(n)``: Sets a (float) timeout on I/O operations. + + For ease of use, you may also pass in an address (as a tuple) or a host + string as the ``sock`` argument. (A host string is a hostname with an + optional port (separated by ``":"``) which will be converted into a + tuple of ``(hostname, port)``.) A socket will be connected to this + address and used for communication. Exceptions from the ``socket`` + call may be thrown in this case. + + .. note:: + Modifying the the window and packet sizes might have adverse + effects on your channels created from this transport. The default + values are the same as in the OpenSSH code base and have been + battle tested. + + :param socket sock: + a socket or socket-like object to create the session over. + :param int default_window_size: + sets the default window size on the transport. 
(defaults to + 2097152) + :param int default_max_packet_size: + sets the default max packet size on the transport. (defaults to + 32768) + :param bool gss_kex: + Whether to enable GSSAPI key exchange when GSSAPI is in play. + Default: ``False``. + :param bool gss_deleg_creds: + Whether to enable GSSAPI credential delegation when GSSAPI is in + play. Default: ``True``. + :param dict disabled_algorithms: + If given, must be a dictionary mapping algorithm type to an + iterable of algorithm identifiers, which will be disabled for the + lifetime of the transport. + + Keys should match the last word in the class' builtin algorithm + tuple attributes, such as ``"ciphers"`` to disable names within + ``_preferred_ciphers``; or ``"kex"`` to disable something defined + inside ``_preferred_kex``. Values should exactly match members of + the matching attribute. + + For example, if you need to disable + ``diffie-hellman-group16-sha512`` key exchange (perhaps because + your code talks to a server which implements it differently from + Paramiko), specify ``disabled_algorithms={"kex": + ["diffie-hellman-group16-sha512"]}``. + :param bool server_sig_algs: + Whether to send an extra message to compatible clients, in server + mode, with a list of supported pubkey algorithms. Default: + ``True``. + :param bool strict_kex: + Whether to advertise (and implement, if client also advertises + support for) a "strict kex" mode for safer handshaking. Default: + ``True``. + :param packetizer_class: + Which class to use for instantiating the internal packet handler. + Default: ``None`` (i.e.: use `Packetizer` as normal). + + .. versionchanged:: 1.15 + Added the ``default_window_size`` and ``default_max_packet_size`` + arguments. + .. versionchanged:: 1.15 + Added the ``gss_kex`` and ``gss_deleg_creds`` kwargs. + .. versionchanged:: 2.6 + Added the ``disabled_algorithms`` kwarg. + .. versionchanged:: 2.9 + Added the ``server_sig_algs`` kwarg. + .. versionchanged:: 3.4 + Added the ``strict_kex`` kwarg. + .. versionchanged:: 3.4 + Added the ``packetizer_class`` kwarg. + """ + self.active = False + self.hostname = None + self.server_extensions = {} + self.advertise_strict_kex = strict_kex + self.agreed_on_strict_kex = False + + # TODO: these two overrides on sock's type should go away sometime, too + # many ways to do it! + if isinstance(sock, str): + # convert "host:port" into (host, port) + hl = sock.split(":", 1) + self.hostname = hl[0] + if len(hl) == 1: + sock = (hl[0], 22) + else: + sock = (hl[0], int(hl[1])) + if type(sock) is tuple: + # connect to the given (host, port) + hostname, port = sock + self.hostname = hostname + reason = "No suitable address family" + addrinfos = socket.getaddrinfo( + hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM + ) + for family, socktype, proto, canonname, sockaddr in addrinfos: + if socktype == socket.SOCK_STREAM: + af = family + # addr = sockaddr + sock = socket.socket(af, socket.SOCK_STREAM) + try: + sock.connect((hostname, port)) + except socket.error as e: + reason = str(e) + else: + break + else: + raise SSHException( + "Unable to connect to {}: {}".format(hostname, reason) + ) + # okay, normal socket-ish flow here... + threading.Thread.__init__(self) + self.daemon = True + self.sock = sock + # we set the timeout so we can check self.active periodically to + # see if we should bail. socket.timeout exception is never propagated. 
+ self.sock.settimeout(self._active_check_timeout) + + # negotiated crypto parameters + self.packetizer = (packetizer_class or Packetizer)(sock) + self.local_version = "SSH-" + self._PROTO_ID + "-" + self._CLIENT_ID + self.remote_version = "" + self.local_cipher = self.remote_cipher = "" + self.local_kex_init = self.remote_kex_init = None + self.local_mac = self.remote_mac = None + self.local_compression = self.remote_compression = None + self.session_id = None + self.host_key_type = None + self.host_key = None + + # GSS-API / SSPI Key Exchange + self.use_gss_kex = gss_kex + # This will be set to True if GSS-API Key Exchange was performed + self.gss_kex_used = False + self.kexgss_ctxt = None + self.gss_host = None + if self.use_gss_kex: + self.kexgss_ctxt = GSSAuth("gssapi-keyex", gss_deleg_creds) + self._preferred_kex = self._preferred_gsskex + self._preferred_kex + + # state used during negotiation + self.kex_engine = None + self.H = None + self.K = None + + self.initial_kex_done = False + self.in_kex = False + self.authenticated = False + self._expected_packet = tuple() + # synchronization (always higher level than write_lock) + self.lock = threading.Lock() + + # tracking open channels + self._channels = ChannelMap() + self.channel_events = {} # (id -> Event) + self.channels_seen = {} # (id -> True) + self._channel_counter = 0 + self.default_max_packet_size = default_max_packet_size + self.default_window_size = default_window_size + self._forward_agent_handler = None + self._x11_handler = None + self._tcp_handler = None + + self.saved_exception = None + self.clear_to_send = threading.Event() + self.clear_to_send_lock = threading.Lock() + self.clear_to_send_timeout = 30.0 + self.log_name = "paramiko.transport" + self.logger = util.get_logger(self.log_name) + self.packetizer.set_log(self.logger) + self.auth_handler = None + # response Message from an arbitrary global request + self.global_response = None + # user-defined event callbacks + self.completion_event = None + # how long (seconds) to wait for the SSH banner + self.banner_timeout = 15 + # how long (seconds) to wait for the handshake to finish after SSH + # banner sent. + self.handshake_timeout = 15 + # how long (seconds) to wait for the auth response. + self.auth_timeout = 30 + # how long (seconds) to wait for opening a channel + self.channel_timeout = 60 * 60 + self.disabled_algorithms = disabled_algorithms or {} + self.server_sig_algs = server_sig_algs + + # server mode: + self.server_mode = False + self.server_object = None + self.server_key_dict = {} + self.server_accepts = [] + self.server_accept_cv = threading.Condition(self.lock) + self.subsystem_table = {} + + # Handler table, now set at init time for easier per-instance + # manipulation and subclass twiddling. 
+ self._handler_table = { + MSG_EXT_INFO: self._parse_ext_info, + MSG_NEWKEYS: self._parse_newkeys, + MSG_GLOBAL_REQUEST: self._parse_global_request, + MSG_REQUEST_SUCCESS: self._parse_request_success, + MSG_REQUEST_FAILURE: self._parse_request_failure, + MSG_CHANNEL_OPEN_SUCCESS: self._parse_channel_open_success, + MSG_CHANNEL_OPEN_FAILURE: self._parse_channel_open_failure, + MSG_CHANNEL_OPEN: self._parse_channel_open, + MSG_KEXINIT: self._negotiate_keys, + } + + def _filter_algorithm(self, type_): + default = getattr(self, "_preferred_{}".format(type_)) + return tuple( + x + for x in default + if x not in self.disabled_algorithms.get(type_, []) + ) + + @property + def preferred_ciphers(self): + return self._filter_algorithm("ciphers") + + @property + def preferred_macs(self): + return self._filter_algorithm("macs") + + @property + def preferred_keys(self): + # Interleave cert variants here; resistant to various background + # overwriting of _preferred_keys, and necessary as hostkeys can't use + # the logic pubkey auth does re: injecting/checking for certs at + # runtime + filtered = self._filter_algorithm("keys") + return tuple( + filtered + + tuple("{}-cert-v01@openssh.com".format(x) for x in filtered) + ) + + @property + def preferred_pubkeys(self): + return self._filter_algorithm("pubkeys") + + @property + def preferred_kex(self): + return self._filter_algorithm("kex") + + @property + def preferred_compression(self): + return self._filter_algorithm("compression") + + def __repr__(self): + """ + Returns a string representation of this object, for debugging. + """ + id_ = hex(id(self) & xffffffff) + out = "` or + `auth_publickey `. + + .. note:: `connect` is a simpler method for connecting as a client. + + .. note:: + After calling this method (or `start_server` or `connect`), you + should no longer directly read from or write to the original socket + object. + + :param .threading.Event event: + an event to trigger when negotiation is complete (optional) + + :param float timeout: + a timeout, in seconds, for SSH2 session negotiation (optional) + + :raises: + `.SSHException` -- if negotiation fails (and no ``event`` was + passed in) + """ + self.active = True + if event is not None: + # async, return immediately and let the app poll for completion + self.completion_event = event + self.start() + return + + # synchronous, wait for a result + self.completion_event = event = threading.Event() + self.start() + max_time = time.time() + timeout if timeout is not None else None + while True: + event.wait(0.1) + if not self.active: + e = self.get_exception() + if e is not None: + raise e + raise SSHException("Negotiation failed.") + if event.is_set() or ( + timeout is not None and time.time() >= max_time + ): + break + + def start_server(self, event=None, server=None): + """ + Negotiate a new SSH2 session as a server. This is the first step after + creating a new `.Transport` and setting up your server host key(s). A + separate thread is created for protocol negotiation. + + If an event is passed in, this method returns immediately. When + negotiation is done (successful or not), the given ``Event`` will + be triggered. On failure, `is_active` will return ``False``. + + (Since 1.4) If ``event`` is ``None``, this method will not return until + negotiation is done. On success, the method returns normally. + Otherwise an SSHException is raised. + + After a successful negotiation, the client will need to authenticate. 
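+
+ A minimal, hypothetical server-side flow might look like this (the
+ ``listener`` socket, ``host_key``, and ``my_server`` -- a
+ `.ServerInterface` implementation -- are assumed to be created by the
+ caller)::
+
+ client_sock, addr = listener.accept()
+ t = Transport(client_sock)
+ t.add_server_key(host_key)
+ t.start_server(server=my_server)
+ chan = t.accept(timeout=30)
+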
+ Override the methods `get_allowed_auths + <.ServerInterface.get_allowed_auths>`, `check_auth_none + <.ServerInterface.check_auth_none>`, `check_auth_password + <.ServerInterface.check_auth_password>`, and `check_auth_publickey + <.ServerInterface.check_auth_publickey>` in the given ``server`` object + to control the authentication process. + + After a successful authentication, the client should request to open a + channel. Override `check_channel_request + <.ServerInterface.check_channel_request>` in the given ``server`` + object to allow channels to be opened. + + .. note:: + After calling this method (or `start_client` or `connect`), you + should no longer directly read from or write to the original socket + object. + + :param .threading.Event event: + an event to trigger when negotiation is complete. + :param .ServerInterface server: + an object used to perform authentication and create `channels + <.Channel>` + + :raises: + `.SSHException` -- if negotiation fails (and no ``event`` was + passed in) + """ + if server is None: + server = ServerInterface() + self.server_mode = True + self.server_object = server + self.active = True + if event is not None: + # async, return immediately and let the app poll for completion + self.completion_event = event + self.start() + return + + # synchronous, wait for a result + self.completion_event = event = threading.Event() + self.start() + while True: + event.wait(0.1) + if not self.active: + e = self.get_exception() + if e is not None: + raise e + raise SSHException("Negotiation failed.") + if event.is_set(): + break + + def add_server_key(self, key): + """ + Add a host key to the list of keys used for server mode. When behaving + as a server, the host key is used to sign certain packets during the + SSH2 negotiation, so that the client can trust that we are who we say + we are. Because this is used for signing, the key must contain private + key info, not just the public half. Only one key of each type is kept. + + :param .PKey key: + the host key (instance of some subclass) to add + """ + self.server_key_dict[key.get_name()] = key + # Handle SHA-2 extensions for RSA by ensuring that lookups into + # self.server_key_dict will yield this key for any of the algorithm + # names. + if isinstance(key, RSAKey): + self.server_key_dict["rsa-sha2-256"] = key + self.server_key_dict["rsa-sha2-512"] = key + + def get_server_key(self): + """ + Return the active host key, in server mode. After negotiating with the + client, this method will return the negotiated host key. If only one + type of host key was set with `add_server_key`, that's the only key + that will ever be returned. But in cases where you have set more than + one type of host key, the key type will be negotiated by the client, + and this method will return the key of the type agreed on. If the host + key has not been negotiated yet, ``None`` is returned. In client mode, + the behavior is undefined. + + :return: + host key (`.PKey`) of the type negotiated by the client, or + ``None``. + """ + try: + return self.server_key_dict[self.host_key_type] + except KeyError: + pass + return None + + @staticmethod + def load_server_moduli(filename=None): + """ + (optional) + Load a file of prime moduli for use in doing group-exchange key + negotiation in server mode. It's a rather obscure option and can be + safely ignored. + + In server mode, the remote client may request "group-exchange" key + negotiation, which asks the server to send a random prime number that + fits certain criteria. 
These primes are pretty difficult to compute, + so they can't be generated on demand. But many systems contain a file + of suitable primes (usually named something like ``/etc/ssh/moduli``). + If you call `load_server_moduli` and it returns ``True``, then this + file of primes has been loaded and we will support "group-exchange" in + server mode. Otherwise server mode will just claim that it doesn't + support that method of key negotiation. + + :param str filename: + optional path to the moduli file, if you happen to know that it's + not in a standard location. + :return: + True if a moduli file was successfully loaded; False otherwise. + + .. note:: This has no effect when used in client mode. + """ + Transport._modulus_pack = ModulusPack() + # places to look for the openssh "moduli" file + file_list = ["/etc/ssh/moduli", "/usr/local/etc/moduli"] + if filename is not None: + file_list.insert(0, filename) + for fn in file_list: + try: + Transport._modulus_pack.read_file(fn) + return True + except IOError: + pass + # none succeeded + Transport._modulus_pack = None + return False + + def close(self): + """ + Close this session, and any open channels that are tied to it. + """ + if not self.active: + return + self.stop_thread() + for chan in list(self._channels.values()): + chan._unlink() + self.sock.close() + + def get_remote_server_key(self): + """ + Return the host key of the server (in client mode). + + .. note:: + Previously this call returned a tuple of ``(key type, key + string)``. You can get the same effect by calling `.PKey.get_name` + for the key type, and ``str(key)`` for the key string. + + :raises: `.SSHException` -- if no session is currently active. + + :return: public key (`.PKey`) of the remote server + """ + if (not self.active) or (not self.initial_kex_done): + raise SSHException("No existing session") + return self.host_key + + def is_active(self): + """ + Return true if this session is active (open). + + :return: + True if the session is still active (open); False if the session is + closed + """ + return self.active + + def open_session( + self, window_size=None, max_packet_size=None, timeout=None + ): + """ + Request a new channel to the server, of type ``"session"``. This is + just an alias for calling `open_channel` with an argument of + ``"session"``. + + .. note:: Modifying the the window and packet sizes might have adverse + effects on the session created. The default values are the same + as in the OpenSSH code base and have been battle tested. + + :param int window_size: + optional window size for this session. + :param int max_packet_size: + optional max packet size for this session. + + :return: a new `.Channel` + + :raises: + `.SSHException` -- if the request is rejected or the session ends + prematurely + + .. versionchanged:: 1.13.4/1.14.3/1.15.3 + Added the ``timeout`` argument. + .. versionchanged:: 1.15 + Added the ``window_size`` and ``max_packet_size`` arguments. + """ + return self.open_channel( + "session", + window_size=window_size, + max_packet_size=max_packet_size, + timeout=timeout, + ) + + def open_x11_channel(self, src_addr=None): + """ + Request a new channel to the client, of type ``"x11"``. This + is just an alias for ``open_channel('x11', src_addr=src_addr)``. + + :param tuple src_addr: + the source address (``(str, int)``) of the x11 server (port is the + x11 port, ie. 
6010) + :return: a new `.Channel` + + :raises: + `.SSHException` -- if the request is rejected or the session ends + prematurely + """ + return self.open_channel("x11", src_addr=src_addr) + + def open_forward_agent_channel(self): + """ + Request a new channel to the client, of type + ``"auth-agent@openssh.com"``. + + This is just an alias for ``open_channel('auth-agent@openssh.com')``. + + :return: a new `.Channel` + + :raises: `.SSHException` -- + if the request is rejected or the session ends prematurely + """ + return self.open_channel("auth-agent@openssh.com") + + def open_forwarded_tcpip_channel(self, src_addr, dest_addr): + """ + Request a new channel back to the client, of type ``forwarded-tcpip``. + + This is used after a client has requested port forwarding, for sending + incoming connections back to the client. + + :param src_addr: originator's address + :param dest_addr: local (server) connected address + """ + return self.open_channel("forwarded-tcpip", dest_addr, src_addr) + + def open_channel( + self, + kind, + dest_addr=None, + src_addr=None, + window_size=None, + max_packet_size=None, + timeout=None, + ): + """ + Request a new channel to the server. `Channels <.Channel>` are + socket-like objects used for the actual transfer of data across the + session. You may only request a channel after negotiating encryption + (using `connect` or `start_client`) and authenticating. + + .. note:: Modifying the the window and packet sizes might have adverse + effects on the channel created. The default values are the same + as in the OpenSSH code base and have been battle tested. + + :param str kind: + the kind of channel requested (usually ``"session"``, + ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``) + :param tuple dest_addr: + the destination address (address + port tuple) of this port + forwarding, if ``kind`` is ``"forwarded-tcpip"`` or + ``"direct-tcpip"`` (ignored for other channel types) + :param src_addr: the source address of this port forwarding, if + ``kind`` is ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"`` + :param int window_size: + optional window size for this session. + :param int max_packet_size: + optional max packet size for this session. + :param float timeout: + optional timeout opening a channel, default 3600s (1h) + + :return: a new `.Channel` on success + + :raises: + `.SSHException` -- if the request is rejected, the session ends + prematurely or there is a timeout opening a channel + + .. versionchanged:: 1.15 + Added the ``window_size`` and ``max_packet_size`` arguments. 
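+
+ Two rough usage sketches, assuming ``t`` is an authenticated
+ `.Transport` (addresses and ports below are placeholders)::
+
+ # exec/shell-style channel
+ chan = t.open_channel("session")
+
+ # ask the server to connect out to 10.0.0.5:80 on our behalf
+ fwd = t.open_channel(
+ "direct-tcpip",
+ dest_addr=("10.0.0.5", 80),
+ src_addr=("127.0.0.1", 40123),
+ )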
+ """ + if not self.active: + raise SSHException("SSH session not active") + timeout = self.channel_timeout if timeout is None else timeout + self.lock.acquire() + try: + window_size = self._sanitize_window_size(window_size) + max_packet_size = self._sanitize_packet_size(max_packet_size) + chanid = self._next_channel() + m = Message() + m.add_byte(cMSG_CHANNEL_OPEN) + m.add_string(kind) + m.add_int(chanid) + m.add_int(window_size) + m.add_int(max_packet_size) + if (kind == "forwarded-tcpip") or (kind == "direct-tcpip"): + m.add_string(dest_addr[0]) + m.add_int(dest_addr[1]) + m.add_string(src_addr[0]) + m.add_int(src_addr[1]) + elif kind == "x11": + m.add_string(src_addr[0]) + m.add_int(src_addr[1]) + chan = Channel(chanid) + self._channels.put(chanid, chan) + self.channel_events[chanid] = event = threading.Event() + self.channels_seen[chanid] = True + chan._set_transport(self) + chan._set_window(window_size, max_packet_size) + finally: + self.lock.release() + self._send_user_message(m) + start_ts = time.time() + while True: + event.wait(0.1) + if not self.active: + e = self.get_exception() + if e is None: + e = SSHException("Unable to open channel.") + raise e + if event.is_set(): + break + elif start_ts + timeout < time.time(): + raise SSHException("Timeout opening channel.") + chan = self._channels.get(chanid) + if chan is not None: + return chan + e = self.get_exception() + if e is None: + e = SSHException("Unable to open channel.") + raise e + + def request_port_forward(self, address, port, handler=None): + """ + Ask the server to forward TCP connections from a listening port on + the server, across this SSH session. + + If a handler is given, that handler is called from a different thread + whenever a forwarded connection arrives. The handler parameters are:: + + handler( + channel, + (origin_addr, origin_port), + (server_addr, server_port), + ) + + where ``server_addr`` and ``server_port`` are the address and port that + the server was listening on. + + If no handler is set, the default behavior is to send new incoming + forwarded connections into the accept queue, to be picked up via + `accept`. + + :param str address: the address to bind when forwarding + :param int port: + the port to forward, or 0 to ask the server to allocate any port + :param callable handler: + optional handler for incoming forwarded connections, of the form + ``func(Channel, (str, int), (str, int))``. + + :return: the port number (`int`) allocated by the server + + :raises: + `.SSHException` -- if the server refused the TCP forward request + """ + if not self.active: + raise SSHException("SSH session not active") + port = int(port) + response = self.global_request( + "tcpip-forward", (address, port), wait=True + ) + if response is None: + raise SSHException("TCP forwarding request denied") + if port == 0: + port = response.get_int() + if handler is None: + + def default_handler(channel, src_addr, dest_addr_port): + # src_addr, src_port = src_addr_port + # dest_addr, dest_port = dest_addr_port + self._queue_incoming_channel(channel) + + handler = default_handler + self._tcp_handler = handler + return port + + def cancel_port_forward(self, address, port): + """ + Ask the server to cancel a previous port-forwarding request. No more + connections to the given address & port will be forwarded across this + ssh connection. 
+ + :param str address: the address to stop forwarding + :param int port: the port to stop forwarding + """ + if not self.active: + return + self._tcp_handler = None + self.global_request("cancel-tcpip-forward", (address, port), wait=True) + + def open_sftp_client(self): + """ + Create an SFTP client channel from an open transport. On success, an + SFTP session will be opened with the remote host, and a new + `.SFTPClient` object will be returned. + + :return: + a new `.SFTPClient` referring to an sftp session (channel) across + this transport + """ + return SFTPClient.from_transport(self) + + def send_ignore(self, byte_count=None): + """ + Send a junk packet across the encrypted link. This is sometimes used + to add "noise" to a connection to confuse would-be attackers. It can + also be used as a keep-alive for long lived connections traversing + firewalls. + + :param int byte_count: + the number of random bytes to send in the payload of the ignored + packet -- defaults to a random number from 10 to 41. + """ + m = Message() + m.add_byte(cMSG_IGNORE) + if byte_count is None: + byte_count = (byte_ord(os.urandom(1)) % 32) + 10 + m.add_bytes(os.urandom(byte_count)) + self._send_user_message(m) + + def renegotiate_keys(self): + """ + Force this session to switch to new keys. Normally this is done + automatically after the session hits a certain number of packets or + bytes sent or received, but this method gives you the option of forcing + new keys whenever you want. Negotiating new keys causes a pause in + traffic both ways as the two sides swap keys and do computations. This + method returns when the session has switched to new keys. + + :raises: + `.SSHException` -- if the key renegotiation failed (which causes + the session to end) + """ + self.completion_event = threading.Event() + self._send_kex_init() + while True: + self.completion_event.wait(0.1) + if not self.active: + e = self.get_exception() + if e is not None: + raise e + raise SSHException("Negotiation failed.") + if self.completion_event.is_set(): + break + return + + def set_keepalive(self, interval): + """ + Turn on/off keepalive packets (default is off). If this is set, after + ``interval`` seconds without sending any data over the connection, a + "keepalive" packet will be sent (and ignored by the remote host). This + can be useful to keep connections alive over a NAT, for example. + + :param int interval: + seconds to wait before sending a keepalive packet (or + 0 to disable keepalives). + """ + + def _request(x=weakref.proxy(self)): + return x.global_request("keepalive@lag.net", wait=False) + + self.packetizer.set_keepalive(interval, _request) + + def global_request(self, kind, data=None, wait=True): + """ + Make a global request to the remote host. These are normally + extensions to the SSH2 protocol. + + :param str kind: name of the request. + :param tuple data: + an optional tuple containing additional data to attach to the + request. + :param bool wait: + ``True`` if this method should not return until a response is + received; ``False`` otherwise. + :return: + a `.Message` containing possible additional data if the request was + successful (or an empty `.Message` if ``wait`` was ``False``); + ``None`` if the request was denied. 
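+
+ For illustration only, a client might send an OpenSSH extension
+ request that needs no reply (servers that do not recognize a request
+ simply refuse it)::
+
+ t.global_request("no-more-sessions@openssh.com", wait=False)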
+ """ + if wait: + self.completion_event = threading.Event() + m = Message() + m.add_byte(cMSG_GLOBAL_REQUEST) + m.add_string(kind) + m.add_boolean(wait) + if data is not None: + m.add(*data) + self._log(DEBUG, 'Sending global request "{}"'.format(kind)) + self._send_user_message(m) + if not wait: + return None + while True: + self.completion_event.wait(0.1) + if not self.active: + return None + if self.completion_event.is_set(): + break + return self.global_response + + def accept(self, timeout=None): + """ + Return the next channel opened by the client over this transport, in + server mode. If no channel is opened before the given timeout, + ``None`` is returned. + + :param int timeout: + seconds to wait for a channel, or ``None`` to wait forever + :return: a new `.Channel` opened by the client + """ + self.lock.acquire() + try: + if len(self.server_accepts) > 0: + chan = self.server_accepts.pop(0) + else: + self.server_accept_cv.wait(timeout) + if len(self.server_accepts) > 0: + chan = self.server_accepts.pop(0) + else: + # timeout + chan = None + finally: + self.lock.release() + return chan + + def connect( + self, + hostkey=None, + username="", + password=None, + pkey=None, + gss_host=None, + gss_auth=False, + gss_kex=False, + gss_deleg_creds=True, + gss_trust_dns=True, + ): + """ + Negotiate an SSH2 session, and optionally verify the server's host key + and authenticate using a password or private key. This is a shortcut + for `start_client`, `get_remote_server_key`, and + `Transport.auth_password` or `Transport.auth_publickey`. Use those + methods if you want more control. + + You can use this method immediately after creating a Transport to + negotiate encryption with a server. If it fails, an exception will be + thrown. On success, the method will return cleanly, and an encrypted + session exists. You may immediately call `open_channel` or + `open_session` to get a `.Channel` object, which is used for data + transfer. + + .. note:: + If you fail to supply a password or private key, this method may + succeed, but a subsequent `open_channel` or `open_session` call may + fail because you haven't authenticated yet. + + :param .PKey hostkey: + the host key expected from the server, or ``None`` if you don't + want to do host key verification. + :param str username: the username to authenticate as. + :param str password: + a password to use for authentication, if you want to use password + authentication; otherwise ``None``. + :param .PKey pkey: + a private key to use for authentication, if you want to use private + key authentication; otherwise ``None``. + :param str gss_host: + The target's name in the kerberos database. Default: hostname + :param bool gss_auth: + ``True`` if you want to use GSS-API authentication. + :param bool gss_kex: + Perform GSS-API Key Exchange and user authentication. + :param bool gss_deleg_creds: + Whether to delegate GSS-API client credentials. + :param gss_trust_dns: + Indicates whether or not the DNS is trusted to securely + canonicalize the name of the host being connected to (default + ``True``). + + :raises: `.SSHException` -- if the SSH2 negotiation fails, the host key + supplied by the server is incorrect, or authentication fails. + + .. versionchanged:: 2.3 + Added the ``gss_trust_dns`` argument. + """ + if hostkey is not None: + # TODO: a more robust implementation would be to ask each key class + # for its nameS plural, and just use that. 
+ # TODO: that could be used in a bunch of other spots too + if isinstance(hostkey, RSAKey): + self._preferred_keys = [ + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + ] + else: + self._preferred_keys = [hostkey.get_name()] + + self.set_gss_host( + gss_host=gss_host, + trust_dns=gss_trust_dns, + gssapi_requested=gss_kex or gss_auth, + ) + + self.start_client() + + # check host key if we were given one + # If GSS-API Key Exchange was performed, we are not required to check + # the host key. + if (hostkey is not None) and not gss_kex: + key = self.get_remote_server_key() + if ( + key.get_name() != hostkey.get_name() + or key.asbytes() != hostkey.asbytes() + ): + self._log(DEBUG, "Bad host key from server") + self._log( + DEBUG, + "Expected: {}: {}".format( + hostkey.get_name(), repr(hostkey.asbytes()) + ), + ) + self._log( + DEBUG, + "Got : {}: {}".format( + key.get_name(), repr(key.asbytes()) + ), + ) + raise SSHException("Bad host key from server") + self._log( + DEBUG, "Host key verified ({})".format(hostkey.get_name()) + ) + + if (pkey is not None) or (password is not None) or gss_auth or gss_kex: + if gss_auth: + self._log( + DEBUG, "Attempting GSS-API auth... (gssapi-with-mic)" + ) # noqa + self.auth_gssapi_with_mic( + username, self.gss_host, gss_deleg_creds + ) + elif gss_kex: + self._log(DEBUG, "Attempting GSS-API auth... (gssapi-keyex)") + self.auth_gssapi_keyex(username) + elif pkey is not None: + self._log(DEBUG, "Attempting public-key auth...") + self.auth_publickey(username, pkey) + else: + self._log(DEBUG, "Attempting password auth...") + self.auth_password(username, password) + + return + + def get_exception(self): + """ + Return any exception that happened during the last server request. + This can be used to fetch more specific error information after using + calls like `start_client`. The exception (if any) is cleared after + this call. + + :return: + an exception, or ``None`` if there is no stored exception. + + .. versionadded:: 1.1 + """ + self.lock.acquire() + try: + e = self.saved_exception + self.saved_exception = None + return e + finally: + self.lock.release() + + def set_subsystem_handler(self, name, handler, *args, **kwargs): + """ + Set the handler class for a subsystem in server mode. If a request + for this subsystem is made on an open ssh channel later, this handler + will be constructed and called -- see `.SubsystemHandler` for more + detailed documentation. + + Any extra parameters (including keyword arguments) are saved and + passed to the `.SubsystemHandler` constructor later. + + :param str name: name of the subsystem. + :param handler: + subclass of `.SubsystemHandler` that handles this subsystem. + """ + try: + self.lock.acquire() + self.subsystem_table[name] = (handler, args, kwargs) + finally: + self.lock.release() + + def is_authenticated(self): + """ + Return true if this session is active and authenticated. + + :return: + True if the session is still open and has been authenticated + successfully; False if authentication failed and/or the session is + closed. + """ + return ( + self.active + and self.auth_handler is not None + and self.auth_handler.is_authenticated() + ) + + def get_username(self): + """ + Return the username this connection is authenticated for. If the + session is not authenticated (or authentication failed), this method + returns ``None``. + + :return: username that was authenticated (a `str`), or ``None``. 
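+
+ For example (sketch; ``t`` is an active `.Transport`)::
+
+ if t.is_authenticated():
+ print("authenticated as", t.get_username())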
+ """ + if not self.active or (self.auth_handler is None): + return None + return self.auth_handler.get_username() + + def get_banner(self): + """ + Return the banner supplied by the server upon connect. If no banner is + supplied, this method returns ``None``. + + :returns: server supplied banner (`str`), or ``None``. + + .. versionadded:: 1.13 + """ + if not self.active or (self.auth_handler is None): + return None + return self.auth_handler.banner + + def auth_none(self, username): + """ + Try to authenticate to the server using no authentication at all. + This will almost always fail. It may be useful for determining the + list of authentication types supported by the server, by catching the + `.BadAuthenticationType` exception raised. + + :param str username: the username to authenticate as + :return: + list of auth types permissible for the next stage of + authentication (normally empty) + + :raises: + `.BadAuthenticationType` -- if "none" authentication isn't allowed + by the server for this user + :raises: + `.SSHException` -- if the authentication failed due to a network + error + + .. versionadded:: 1.5 + """ + if (not self.active) or (not self.initial_kex_done): + raise SSHException("No existing session") + my_event = threading.Event() + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_none(username, my_event) + return self.auth_handler.wait_for_response(my_event) + + def auth_password(self, username, password, event=None, fallback=True): + """ + Authenticate to the server using a password. The username and password + are sent over an encrypted link. + + If an ``event`` is passed in, this method will return immediately, and + the event will be triggered once authentication succeeds or fails. On + success, `is_authenticated` will return ``True``. On failure, you may + use `get_exception` to get more detailed error information. + + Since 1.1, if no event is passed, this method will block until the + authentication succeeds or fails. On failure, an exception is raised. + Otherwise, the method simply returns. + + Since 1.5, if no event is passed and ``fallback`` is ``True`` (the + default), if the server doesn't support plain password authentication + but does support so-called "keyboard-interactive" mode, an attempt + will be made to authenticate using this interactive mode. If it fails, + the normal exception will be thrown as if the attempt had never been + made. This is useful for some recent Gentoo and Debian distributions, + which turn off plain password authentication in a misguided belief + that interactive authentication is "more secure". (It's not.) + + If the server requires multi-step authentication (which is very rare), + this method will return a list of auth types permissible for the next + step. Otherwise, in the normal case, an empty list is returned. 
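+
+ A rough blocking example (the host and credentials are placeholders,
+ and host-key checking via `get_remote_server_key` is omitted for
+ brevity)::
+
+ t = Transport(("ssh.example.com", 22))
+ t.start_client()
+ t.auth_password("alice", "sekrit")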
+ + :param str username: the username to authenticate as + :param basestring password: the password to authenticate with + :param .threading.Event event: + an event to trigger when the authentication attempt is complete + (whether it was successful or not) + :param bool fallback: + ``True`` if an attempt at an automated "interactive" password auth + should be made if the server doesn't support normal password auth + :return: + list of auth types permissible for the next stage of + authentication (normally empty) + + :raises: + `.BadAuthenticationType` -- if password authentication isn't + allowed by the server for this user (and no event was passed in) + :raises: + `.AuthenticationException` -- if the authentication failed (and no + event was passed in) + :raises: `.SSHException` -- if there was a network error + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to send the password unless we're on a secure + # link + raise SSHException("No existing session") + if event is None: + my_event = threading.Event() + else: + my_event = event + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_password(username, password, my_event) + if event is not None: + # caller wants to wait for event themselves + return [] + try: + return self.auth_handler.wait_for_response(my_event) + except BadAuthenticationType as e: + # if password auth isn't allowed, but keyboard-interactive *is*, + # try to fudge it + if not fallback or ("keyboard-interactive" not in e.allowed_types): + raise + try: + + def handler(title, instructions, fields): + if len(fields) > 1: + raise SSHException("Fallback authentication failed.") + if len(fields) == 0: + # for some reason, at least on os x, a 2nd request will + # be made with zero fields requested. maybe it's just + # to try to fake out automated scripting of the exact + # type we're doing here. *shrug* :) + return [] + return [password] + + return self.auth_interactive(username, handler) + except SSHException: + # attempt failed; just raise the original exception + raise e + + def auth_publickey(self, username, key, event=None): + """ + Authenticate to the server using a private key. The key is used to + sign data from the server, so it must include the private part. + + If an ``event`` is passed in, this method will return immediately, and + the event will be triggered once authentication succeeds or fails. On + success, `is_authenticated` will return ``True``. On failure, you may + use `get_exception` to get more detailed error information. + + Since 1.1, if no event is passed, this method will block until the + authentication succeeds or fails. On failure, an exception is raised. + Otherwise, the method simply returns. + + If the server requires multi-step authentication (which is very rare), + this method will return a list of auth types permissible for the next + step. Otherwise, in the normal case, an empty list is returned. 
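+
+ A rough example using a key loaded from disk (the path and username
+ are placeholders; ``import paramiko`` is assumed)::
+
+ key = paramiko.RSAKey.from_private_key_file("/home/alice/.ssh/id_rsa")
+ t.auth_publickey("alice", key)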
+ + :param str username: the username to authenticate as + :param .PKey key: the private key to authenticate with + :param .threading.Event event: + an event to trigger when the authentication attempt is complete + (whether it was successful or not) + :return: + list of auth types permissible for the next stage of + authentication (normally empty) + + :raises: + `.BadAuthenticationType` -- if public-key authentication isn't + allowed by the server for this user (and no event was passed in) + :raises: + `.AuthenticationException` -- if the authentication failed (and no + event was passed in) + :raises: `.SSHException` -- if there was a network error + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to authenticate unless we're on a secure link + raise SSHException("No existing session") + if event is None: + my_event = threading.Event() + else: + my_event = event + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_publickey(username, key, my_event) + if event is not None: + # caller wants to wait for event themselves + return [] + return self.auth_handler.wait_for_response(my_event) + + def auth_interactive(self, username, handler, submethods=""): + """ + Authenticate to the server interactively. A handler is used to answer + arbitrary questions from the server. On many servers, this is just a + dumb wrapper around PAM. + + This method will block until the authentication succeeds or fails, + periodically calling the handler asynchronously to get answers to + authentication questions. The handler may be called more than once + if the server continues to ask questions. + + The handler is expected to be a callable that will handle calls of the + form: ``handler(title, instructions, prompt_list)``. The ``title`` is + meant to be a dialog-window title, and the ``instructions`` are user + instructions (both are strings). ``prompt_list`` will be a list of + prompts, each prompt being a tuple of ``(str, bool)``. The string is + the prompt and the boolean indicates whether the user text should be + echoed. + + A sample call would thus be: + ``handler('title', 'instructions', [('Password:', False)])``. + + The handler should return a list or tuple of answers to the server's + questions. + + If the server requires multi-step authentication (which is very rare), + this method will return a list of auth types permissible for the next + step. Otherwise, in the normal case, an empty list is returned. + + :param str username: the username to authenticate as + :param callable handler: a handler for responding to server questions + :param str submethods: a string list of desired submethods (optional) + :return: + list of auth types permissible for the next stage of + authentication (normally empty). + + :raises: `.BadAuthenticationType` -- if public-key authentication isn't + allowed by the server for this user + :raises: `.AuthenticationException` -- if the authentication failed + :raises: `.SSHException` -- if there was a network error + + .. 
versionadded:: 1.5 + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to authenticate unless we're on a secure link + raise SSHException("No existing session") + my_event = threading.Event() + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_interactive( + username, handler, my_event, submethods + ) + return self.auth_handler.wait_for_response(my_event) + + def auth_interactive_dumb(self, username, handler=None, submethods=""): + """ + Authenticate to the server interactively but dumber. + Just print the prompt and / or instructions to stdout and send back + the response. This is good for situations where partial auth is + achieved by key and then the user has to enter a 2fac token. + """ + + if not handler: + + def handler(title, instructions, prompt_list): + answers = [] + if title: + print(title.strip()) + if instructions: + print(instructions.strip()) + for prompt, show_input in prompt_list: + print(prompt.strip(), end=" ") + answers.append(input()) + return answers + + return self.auth_interactive(username, handler, submethods) + + def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds): + """ + Authenticate to the Server using GSS-API / SSPI. + + :param str username: The username to authenticate as + :param str gss_host: The target host + :param bool gss_deleg_creds: Delegate credentials or not + :return: list of auth types permissible for the next stage of + authentication (normally empty) + :raises: `.BadAuthenticationType` -- if gssapi-with-mic isn't + allowed by the server (and no event was passed in) + :raises: + `.AuthenticationException` -- if the authentication failed (and no + event was passed in) + :raises: `.SSHException` -- if there was a network error + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to authenticate unless we're on a secure link + raise SSHException("No existing session") + my_event = threading.Event() + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_gssapi_with_mic( + username, gss_host, gss_deleg_creds, my_event + ) + return self.auth_handler.wait_for_response(my_event) + + def auth_gssapi_keyex(self, username): + """ + Authenticate to the server with GSS-API/SSPI if GSS-API kex is in use. + + :param str username: The username to authenticate as. + :returns: + a list of auth types permissible for the next stage of + authentication (normally empty) + :raises: `.BadAuthenticationType` -- + if GSS-API Key Exchange was not performed (and no event was passed + in) + :raises: `.AuthenticationException` -- + if the authentication failed (and no event was passed in) + :raises: `.SSHException` -- if there was a network error + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to authenticate unless we're on a secure link + raise SSHException("No existing session") + my_event = threading.Event() + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_gssapi_keyex(username, my_event) + return self.auth_handler.wait_for_response(my_event) + + def set_log_channel(self, name): + """ + Set the channel for this transport's logging. The default is + ``"paramiko.transport"`` but it can be set to anything you want. (See + the `.logging` module for more info.) SSH Channels will log to a + sub-channel of the one specified. + + :param str name: new channel name for logging + + .. 
versionadded:: 1.1 + """ + self.log_name = name + self.logger = util.get_logger(name) + self.packetizer.set_log(self.logger) + + def get_log_channel(self): + """ + Return the channel name used for this transport's logging. + + :return: channel name as a `str` + + .. versionadded:: 1.2 + """ + return self.log_name + + def set_hexdump(self, hexdump): + """ + Turn on/off logging a hex dump of protocol traffic at DEBUG level in + the logs. Normally you would want this off (which is the default), + but if you are debugging something, it may be useful. + + :param bool hexdump: + ``True`` to log protocol traffix (in hex) to the log; ``False`` + otherwise. + """ + self.packetizer.set_hexdump(hexdump) + + def get_hexdump(self): + """ + Return ``True`` if the transport is currently logging hex dumps of + protocol traffic. + + :return: ``True`` if hex dumps are being logged, else ``False``. + + .. versionadded:: 1.4 + """ + return self.packetizer.get_hexdump() + + def use_compression(self, compress=True): + """ + Turn on/off compression. This will only have an affect before starting + the transport (ie before calling `connect`, etc). By default, + compression is off since it negatively affects interactive sessions. + + :param bool compress: + ``True`` to ask the remote client/server to compress traffic; + ``False`` to refuse compression + + .. versionadded:: 1.5.2 + """ + if compress: + self._preferred_compression = ("zlib@openssh.com", "zlib", "none") + else: + self._preferred_compression = ("none",) + + def getpeername(self): + """ + Return the address of the remote side of this Transport, if possible. + + This is effectively a wrapper around ``getpeername`` on the underlying + socket. If the socket-like object has no ``getpeername`` method, then + ``("unknown", 0)`` is returned. + + :return: + the address of the remote host, if known, as a ``(str, int)`` + tuple. + """ + gp = getattr(self.sock, "getpeername", None) + if gp is None: + return "unknown", 0 + return gp() + + def stop_thread(self): + self.active = False + self.packetizer.close() + # Keep trying to join() our main thread, quickly, until: + # * We join()ed successfully (self.is_alive() == False) + # * Or it looks like we've hit issue #520 (socket.recv hitting some + # race condition preventing it from timing out correctly), wherein + # our socket and packetizer are both closed (but where we'd + # otherwise be sitting forever on that recv()). + while ( + self.is_alive() + and self is not threading.current_thread() + and not self.sock._closed + and not self.packetizer.closed + ): + self.join(0.1) + + # internals... 
+ + # TODO 4.0: make a public alias for this because multiple other classes + # already explicitly rely on it...or just rewrite logging :D + def _log(self, level, msg, *args): + if issubclass(type(msg), list): + for m in msg: + self.logger.log(level, m) + else: + self.logger.log(level, msg, *args) + + def _get_modulus_pack(self): + """used by KexGex to find primes for group exchange""" + return self._modulus_pack + + def _next_channel(self): + """you are holding the lock""" + chanid = self._channel_counter + while self._channels.get(chanid) is not None: + self._channel_counter = (self._channel_counter + 1) & 0xFFFFFF + chanid = self._channel_counter + self._channel_counter = (self._channel_counter + 1) & 0xFFFFFF + return chanid + + def _unlink_channel(self, chanid): + """used by a Channel to remove itself from the active channel list""" + self._channels.delete(chanid) + + def _send_message(self, data): + self.packetizer.send_message(data) + + def _send_user_message(self, data): + """ + send a message, but block if we're in key negotiation. this is used + for user-initiated requests. + """ + start = time.time() + while True: + self.clear_to_send.wait(0.1) + if not self.active: + self._log( + DEBUG, "Dropping user packet because connection is dead." + ) # noqa + return + self.clear_to_send_lock.acquire() + if self.clear_to_send.is_set(): + break + self.clear_to_send_lock.release() + if time.time() > start + self.clear_to_send_timeout: + raise SSHException( + "Key-exchange timed out waiting for key negotiation" + ) # noqa + try: + self._send_message(data) + finally: + self.clear_to_send_lock.release() + + def _set_K_H(self, k, h): + """ + Used by a kex obj to set the K (root key) and H (exchange hash). + """ + self.K = k + self.H = h + if self.session_id is None: + self.session_id = h + + def _expect_packet(self, *ptypes): + """ + Used by a kex obj to register the next packet type it expects to see. + """ + self._expected_packet = tuple(ptypes) + + def _verify_key(self, host_key, sig): + key = self._key_info[self.host_key_type](Message(host_key)) + if key is None: + raise SSHException("Unknown host key type") + if not key.verify_ssh_sig(self.H, Message(sig)): + raise SSHException( + "Signature verification ({}) failed.".format( + self.host_key_type + ) + ) # noqa + self.host_key = key + + def _compute_key(self, id, nbytes): + """id is 'A' - 'F' for the various keys used by ssh""" + m = Message() + m.add_mpint(self.K) + m.add_bytes(self.H) + m.add_byte(b(id)) + m.add_bytes(self.session_id) + # Fallback to SHA1 for kex engines that fail to specify a hex + # algorithm, or for e.g. transport tests that don't run kexinit. 
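+ # (The loop below follows RFC 4253 section 7.2: the first block is
+ # HASH(K || H || X || session_id), where X is the single letter given
+ # as ``id``, and further blocks are HASH(K || H || output-so-far),
+ # appended until ``nbytes`` bytes have been produced.)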
+ hash_algo = getattr(self.kex_engine, "hash_algo", None) + hash_select_msg = "kex engine {} specified hash_algo {!r}".format( + self.kex_engine.__class__.__name__, hash_algo + ) + if hash_algo is None: + hash_algo = sha1 + hash_select_msg += ", falling back to sha1" + if not hasattr(self, "_logged_hash_selection"): + self._log(DEBUG, hash_select_msg) + setattr(self, "_logged_hash_selection", True) + out = sofar = hash_algo(m.asbytes()).digest() + while len(out) < nbytes: + m = Message() + m.add_mpint(self.K) + m.add_bytes(self.H) + m.add_bytes(sofar) + digest = hash_algo(m.asbytes()).digest() + out += digest + sofar += digest + return out[:nbytes] + + def _get_engine(self, name, key, iv=None, operation=None, aead=False): + if name not in self._cipher_info: + raise SSHException("Unknown cipher " + name) + info = self._cipher_info[name] + algorithm = info["class"](key) + # AEAD types (eg GCM) use their algorithm class /as/ the encryption + # engine (they expose the same encrypt/decrypt API as a CipherContext) + if aead: + return algorithm + # All others go through the Cipher class. + cipher = Cipher( + algorithm=algorithm, + # TODO: why is this getting tickled in aesgcm mode??? + mode=info["mode"](iv), + backend=default_backend(), + ) + if operation is self._ENCRYPT: + return cipher.encryptor() + else: + return cipher.decryptor() + + def _set_forward_agent_handler(self, handler): + if handler is None: + + def default_handler(channel): + self._queue_incoming_channel(channel) + + self._forward_agent_handler = default_handler + else: + self._forward_agent_handler = handler + + def _set_x11_handler(self, handler): + # only called if a channel has turned on x11 forwarding + if handler is None: + # by default, use the same mechanism as accept() + def default_handler(channel, src_addr_port): + self._queue_incoming_channel(channel) + + self._x11_handler = default_handler + else: + self._x11_handler = handler + + def _queue_incoming_channel(self, channel): + self.lock.acquire() + try: + self.server_accepts.append(channel) + self.server_accept_cv.notify() + finally: + self.lock.release() + + def _sanitize_window_size(self, window_size): + if window_size is None: + window_size = self.default_window_size + return clamp_value(MIN_WINDOW_SIZE, window_size, MAX_WINDOW_SIZE) + + def _sanitize_packet_size(self, max_packet_size): + if max_packet_size is None: + max_packet_size = self.default_max_packet_size + return clamp_value(MIN_PACKET_SIZE, max_packet_size, MAX_WINDOW_SIZE) + + def _ensure_authed(self, ptype, message): + """ + Checks message type against current auth state. + + If server mode, and auth has not succeeded, and the message is of a + post-auth type (channel open or global request) an appropriate error + response Message is crafted and returned to caller for sending. + + Otherwise (client mode, authed, or pre-auth message) returns None. + """ + if ( + not self.server_mode + or ptype <= HIGHEST_USERAUTH_MESSAGE_ID + or self.is_authenticated() + ): + return None + # WELP. We must be dealing with someone trying to do non-auth things + # without being authed. Tell them off, based on message class. + reply = Message() + # Global requests have no details, just failure. + if ptype == MSG_GLOBAL_REQUEST: + reply.add_byte(cMSG_REQUEST_FAILURE) + # Channel opens let us reject w/ a specific type + message. 
+ elif ptype == MSG_CHANNEL_OPEN: + kind = message.get_text() # noqa + chanid = message.get_int() + reply.add_byte(cMSG_CHANNEL_OPEN_FAILURE) + reply.add_int(chanid) + reply.add_int(OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED) + reply.add_string("") + reply.add_string("en") + # NOTE: Post-open channel messages do not need checking; the above will + # reject attempts to open channels, meaning that even if a malicious + # user tries to send a MSG_CHANNEL_REQUEST, it will simply fall under + # the logic that handles unknown channel IDs (as the channel list will + # be empty.) + return reply + + def _enforce_strict_kex(self, ptype): + """ + Conditionally raise `MessageOrderError` during strict initial kex. + + This method should only be called inside code that handles non-KEXINIT + messages; it does not interrogate ``ptype`` besides using it to log + more accurately. + """ + if self.agreed_on_strict_kex and not self.initial_kex_done: + name = MSG_NAMES.get(ptype, f"msg {ptype}") + raise MessageOrderError( + f"In strict-kex mode, but was sent {name!r}!" + ) + + def run(self): + # (use the exposed "run" method, because if we specify a thread target + # of a private method, threading.Thread will keep a reference to it + # indefinitely, creating a GC cycle and not letting Transport ever be + # GC'd. it's a bug in Thread.) + + # Hold reference to 'sys' so we can test sys.modules to detect + # interpreter shutdown. + self.sys = sys + + # active=True occurs before the thread is launched, to avoid a race + _active_threads.append(self) + tid = hex(id(self) & xffffffff) + if self.server_mode: + self._log(DEBUG, "starting thread (server mode): {}".format(tid)) + else: + self._log(DEBUG, "starting thread (client mode): {}".format(tid)) + try: + try: + self.packetizer.write_all(b(self.local_version + "\r\n")) + self._log( + DEBUG, + "Local version/idstring: {}".format(self.local_version), + ) # noqa + self._check_banner() + # The above is actually very much part of the handshake, but + # sometimes the banner can be read but the machine is not + # responding, for example when the remote ssh daemon is loaded + # in to memory but we can not read from the disk/spawn a new + # shell. + # Make sure we can specify a timeout for the initial handshake. + # Reuse the banner timeout for now. 
+ self.packetizer.start_handshake(self.handshake_timeout) + self._send_kex_init() + self._expect_packet(MSG_KEXINIT) + + while self.active: + if self.packetizer.need_rekey() and not self.in_kex: + self._send_kex_init() + try: + ptype, m = self.packetizer.read_message() + except NeedRekeyException: + continue + if ptype == MSG_IGNORE: + self._enforce_strict_kex(ptype) + continue + elif ptype == MSG_DISCONNECT: + self._parse_disconnect(m) + break + elif ptype == MSG_DEBUG: + self._enforce_strict_kex(ptype) + self._parse_debug(m) + continue + if len(self._expected_packet) > 0: + if ptype not in self._expected_packet: + exc_class = SSHException + if self.agreed_on_strict_kex: + exc_class = MessageOrderError + raise exc_class( + "Expecting packet from {!r}, got {:d}".format( + self._expected_packet, ptype + ) + ) # noqa + self._expected_packet = tuple() + # These message IDs indicate key exchange & will differ + # depending on exact exchange algorithm + if (ptype >= 30) and (ptype <= 41): + self.kex_engine.parse_next(ptype, m) + continue + + if ptype in self._handler_table: + error_msg = self._ensure_authed(ptype, m) + if error_msg: + self._send_message(error_msg) + else: + self._handler_table[ptype](m) + elif ptype in self._channel_handler_table: + chanid = m.get_int() + chan = self._channels.get(chanid) + if chan is not None: + self._channel_handler_table[ptype](chan, m) + elif chanid in self.channels_seen: + self._log( + DEBUG, + "Ignoring message for dead channel {:d}".format( # noqa + chanid + ), + ) + else: + self._log( + ERROR, + "Channel request for unknown channel {:d}".format( # noqa + chanid + ), + ) + break + elif ( + self.auth_handler is not None + and ptype in self.auth_handler._handler_table + ): + handler = self.auth_handler._handler_table[ptype] + handler(m) + if len(self._expected_packet) > 0: + continue + else: + # Respond with "I don't implement this particular + # message type" message (unless the message type was + # itself literally MSG_UNIMPLEMENTED, in which case, we + # just shut up to avoid causing a useless loop). + name = MSG_NAMES[ptype] + warning = "Oops, unhandled type {} ({!r})".format( + ptype, name + ) + self._log(WARNING, warning) + if ptype != MSG_UNIMPLEMENTED: + msg = Message() + msg.add_byte(cMSG_UNIMPLEMENTED) + msg.add_int(m.seqno) + self._send_message(msg) + self.packetizer.complete_handshake() + except SSHException as e: + self._log( + ERROR, + "Exception ({}): {}".format( + "server" if self.server_mode else "client", e + ), + ) + self._log(ERROR, util.tb_strings()) + self.saved_exception = e + except EOFError as e: + self._log(DEBUG, "EOF in transport thread") + self.saved_exception = e + except socket.error as e: + if type(e.args) is tuple: + if e.args: + emsg = "{} ({:d})".format(e.args[1], e.args[0]) + else: # empty tuple, e.g. 
socket.timeout + emsg = str(e) or repr(e) + else: + emsg = e.args + self._log(ERROR, "Socket exception: " + emsg) + self.saved_exception = e + except Exception as e: + self._log(ERROR, "Unknown exception: " + str(e)) + self._log(ERROR, util.tb_strings()) + self.saved_exception = e + _active_threads.remove(self) + for chan in list(self._channels.values()): + chan._unlink() + if self.active: + self.active = False + self.packetizer.close() + if self.completion_event is not None: + self.completion_event.set() + if self.auth_handler is not None: + self.auth_handler.abort() + for event in self.channel_events.values(): + event.set() + try: + self.lock.acquire() + self.server_accept_cv.notify() + finally: + self.lock.release() + self.sock.close() + except: + # Don't raise spurious 'NoneType has no attribute X' errors when we + # wake up during interpreter shutdown. Or rather -- raise + # everything *if* sys.modules (used as a convenient sentinel) + # appears to still exist. + if self.sys.modules is not None: + raise + + def _log_agreement(self, which, local, remote): + # Log useful, non-duplicative line re: an agreed-upon algorithm. + # Old code implied algorithms could be asymmetrical (different for + # inbound vs outbound) so we preserve that possibility. + msg = "{}: ".format(which) + if local == remote: + msg += local + else: + msg += "local={}, remote={}".format(local, remote) + self._log(DEBUG, msg) + + # protocol stages + + def _negotiate_keys(self, m): + # throws SSHException on anything unusual + self.clear_to_send_lock.acquire() + try: + self.clear_to_send.clear() + finally: + self.clear_to_send_lock.release() + if self.local_kex_init is None: + # remote side wants to renegotiate + self._send_kex_init() + self._parse_kex_init(m) + self.kex_engine.start_kex() + + def _check_banner(self): + # this is slow, but we only have to do it once + for i in range(100): + # give them 15 seconds for the first line, then just 2 seconds + # each additional line. (some sites have very high latency.) + if i == 0: + timeout = self.banner_timeout + else: + timeout = 2 + try: + buf = self.packetizer.readline(timeout) + except ProxyCommandFailure: + raise + except Exception as e: + raise SSHException( + "Error reading SSH protocol banner" + str(e) + ) + if buf[:4] == "SSH-": + break + self._log(DEBUG, "Banner: " + buf) + if buf[:4] != "SSH-": + raise SSHException('Indecipherable protocol version "' + buf + '"') + # save this server version string for later + self.remote_version = buf + self._log(DEBUG, "Remote version/idstring: {}".format(buf)) + # pull off any attached comment + # NOTE: comment used to be stored in a variable and then...never used. + # since 2003. ca 877cd974b8182d26fa76d566072917ea67b64e67 + i = buf.find(" ") + if i >= 0: + buf = buf[:i] + # parse out version string and make sure it matches + segs = buf.split("-", 2) + if len(segs) < 3: + raise SSHException("Invalid SSH banner") + version = segs[1] + client = segs[2] + if version != "1.99" and version != "2.0": + msg = "Incompatible version ({} instead of 2.0)" + raise IncompatiblePeer(msg.format(version)) + msg = "Connected (version {}, client {})".format(version, client) + self._log(INFO, msg) + + def _send_kex_init(self): + """ + announce to the other side that we'd like to negotiate keys, and what + kind of key negotiation we support. 
+ """ + self.clear_to_send_lock.acquire() + try: + self.clear_to_send.clear() + finally: + self.clear_to_send_lock.release() + self.gss_kex_used = False + self.in_kex = True + kex_algos = list(self.preferred_kex) + if self.server_mode: + mp_required_prefix = "diffie-hellman-group-exchange-sha" + kex_mp = [k for k in kex_algos if k.startswith(mp_required_prefix)] + if (self._modulus_pack is None) and (len(kex_mp) > 0): + # can't do group-exchange if we don't have a pack of potential + # primes + pkex = [ + k + for k in self.get_security_options().kex + if not k.startswith(mp_required_prefix) + ] + self.get_security_options().kex = pkex + available_server_keys = list( + filter( + list(self.server_key_dict.keys()).__contains__, + # TODO: ensure tests will catch if somebody streamlines + # this by mistake - case is the admittedly silly one where + # the only calls to add_server_key() contain keys which + # were filtered out of the below via disabled_algorithms. + # If this is streamlined, we would then be allowing the + # disabled algorithm(s) for hostkey use + # TODO: honestly this prob just wants to get thrown out + # when we make kex configuration more straightforward + self.preferred_keys, + ) + ) + else: + available_server_keys = self.preferred_keys + # Signal support for MSG_EXT_INFO so server will send it to us. + # NOTE: doing this here handily means we don't even consider this + # value when agreeing on real kex algo to use (which is a common + # pitfall when adding this apparently). + kex_algos.append("ext-info-c") + + # Similar to ext-info, but used in both server modes, so done outside + # of above if/else. + if self.advertise_strict_kex: + which = "s" if self.server_mode else "c" + kex_algos.append(f"kex-strict-{which}-v00@openssh.com") + + m = Message() + m.add_byte(cMSG_KEXINIT) + m.add_bytes(os.urandom(16)) + m.add_list(kex_algos) + m.add_list(available_server_keys) + m.add_list(self.preferred_ciphers) + m.add_list(self.preferred_ciphers) + m.add_list(self.preferred_macs) + m.add_list(self.preferred_macs) + m.add_list(self.preferred_compression) + m.add_list(self.preferred_compression) + m.add_string(bytes()) + m.add_string(bytes()) + m.add_boolean(False) + m.add_int(0) + # save a copy for later (needed to compute a hash) + self.local_kex_init = self._latest_kex_init = m.asbytes() + self._send_message(m) + + def _really_parse_kex_init(self, m, ignore_first_byte=False): + parsed = {} + if ignore_first_byte: + m.get_byte() + m.get_bytes(16) # cookie, discarded + parsed["kex_algo_list"] = m.get_list() + parsed["server_key_algo_list"] = m.get_list() + parsed["client_encrypt_algo_list"] = m.get_list() + parsed["server_encrypt_algo_list"] = m.get_list() + parsed["client_mac_algo_list"] = m.get_list() + parsed["server_mac_algo_list"] = m.get_list() + parsed["client_compress_algo_list"] = m.get_list() + parsed["server_compress_algo_list"] = m.get_list() + parsed["client_lang_list"] = m.get_list() + parsed["server_lang_list"] = m.get_list() + parsed["kex_follows"] = m.get_boolean() + m.get_int() # unused + return parsed + + def _get_latest_kex_init(self): + return self._really_parse_kex_init( + Message(self._latest_kex_init), + ignore_first_byte=True, + ) + + def _parse_kex_init(self, m): + parsed = self._really_parse_kex_init(m) + kex_algo_list = parsed["kex_algo_list"] + server_key_algo_list = parsed["server_key_algo_list"] + client_encrypt_algo_list = parsed["client_encrypt_algo_list"] + server_encrypt_algo_list = parsed["server_encrypt_algo_list"] + client_mac_algo_list = 
parsed["client_mac_algo_list"] + server_mac_algo_list = parsed["server_mac_algo_list"] + client_compress_algo_list = parsed["client_compress_algo_list"] + server_compress_algo_list = parsed["server_compress_algo_list"] + client_lang_list = parsed["client_lang_list"] + server_lang_list = parsed["server_lang_list"] + kex_follows = parsed["kex_follows"] + + self._log(DEBUG, "=== Key exchange possibilities ===") + for prefix, value in ( + ("kex algos", kex_algo_list), + ("server key", server_key_algo_list), + # TODO: shouldn't these two lines say "cipher" to match usual + # terminology (including elsewhere in paramiko!)? + ("client encrypt", client_encrypt_algo_list), + ("server encrypt", server_encrypt_algo_list), + ("client mac", client_mac_algo_list), + ("server mac", server_mac_algo_list), + ("client compress", client_compress_algo_list), + ("server compress", server_compress_algo_list), + ("client lang", client_lang_list), + ("server lang", server_lang_list), + ): + if value == [""]: + value = [""] + value = ", ".join(value) + self._log(DEBUG, "{}: {}".format(prefix, value)) + self._log(DEBUG, "kex follows: {}".format(kex_follows)) + self._log(DEBUG, "=== Key exchange agreements ===") + + # Record, and strip out, ext-info and/or strict-kex non-algorithms + self._remote_ext_info = None + self._remote_strict_kex = None + to_pop = [] + for i, algo in enumerate(kex_algo_list): + if algo.startswith("ext-info-"): + self._remote_ext_info = algo + to_pop.insert(0, i) + elif algo.startswith("kex-strict-"): + # NOTE: this is what we are expecting from the /remote/ end. + which = "c" if self.server_mode else "s" + expected = f"kex-strict-{which}-v00@openssh.com" + # Set strict mode if agreed. + self.agreed_on_strict_kex = ( + algo == expected and self.advertise_strict_kex + ) + self._log( + DEBUG, f"Strict kex mode: {self.agreed_on_strict_kex}" + ) + to_pop.insert(0, i) + for i in to_pop: + kex_algo_list.pop(i) + + # CVE mitigation: expect zeroed-out seqno anytime we are performing kex + # init phase, if strict mode was negotiated. + if ( + self.agreed_on_strict_kex + and not self.initial_kex_done + and m.seqno != 0 + ): + raise MessageOrderError( + "In strict-kex mode, but KEXINIT was not the first packet!" + ) + + # as a server, we pick the first item in the client's list that we + # support. + # as a client, we pick the first item in our list that the server + # supports. + if self.server_mode: + agreed_kex = list( + filter(self.preferred_kex.__contains__, kex_algo_list) + ) + else: + agreed_kex = list( + filter(kex_algo_list.__contains__, self.preferred_kex) + ) + if len(agreed_kex) == 0: + # TODO: do an auth-overhaul style aggregate exception here? 
+ # TODO: would let us streamline log output & show all failures up + # front + raise IncompatiblePeer( + "Incompatible ssh peer (no acceptable kex algorithm)" + ) # noqa + self.kex_engine = self._kex_info[agreed_kex[0]](self) + self._log(DEBUG, "Kex: {}".format(agreed_kex[0])) + + if self.server_mode: + available_server_keys = list( + filter( + list(self.server_key_dict.keys()).__contains__, + self.preferred_keys, + ) + ) + agreed_keys = list( + filter( + available_server_keys.__contains__, server_key_algo_list + ) + ) + else: + agreed_keys = list( + filter(server_key_algo_list.__contains__, self.preferred_keys) + ) + if len(agreed_keys) == 0: + raise IncompatiblePeer( + "Incompatible ssh peer (no acceptable host key)" + ) # noqa + self.host_key_type = agreed_keys[0] + if self.server_mode and (self.get_server_key() is None): + raise IncompatiblePeer( + "Incompatible ssh peer (can't match requested host key type)" + ) # noqa + self._log_agreement("HostKey", agreed_keys[0], agreed_keys[0]) + + if self.server_mode: + agreed_local_ciphers = list( + filter( + self.preferred_ciphers.__contains__, + server_encrypt_algo_list, + ) + ) + agreed_remote_ciphers = list( + filter( + self.preferred_ciphers.__contains__, + client_encrypt_algo_list, + ) + ) + else: + agreed_local_ciphers = list( + filter( + client_encrypt_algo_list.__contains__, + self.preferred_ciphers, + ) + ) + agreed_remote_ciphers = list( + filter( + server_encrypt_algo_list.__contains__, + self.preferred_ciphers, + ) + ) + if len(agreed_local_ciphers) == 0 or len(agreed_remote_ciphers) == 0: + raise IncompatiblePeer( + "Incompatible ssh server (no acceptable ciphers)" + ) # noqa + self.local_cipher = agreed_local_ciphers[0] + self.remote_cipher = agreed_remote_ciphers[0] + self._log_agreement( + "Cipher", local=self.local_cipher, remote=self.remote_cipher + ) + + if self.server_mode: + agreed_remote_macs = list( + filter(self.preferred_macs.__contains__, client_mac_algo_list) + ) + agreed_local_macs = list( + filter(self.preferred_macs.__contains__, server_mac_algo_list) + ) + else: + agreed_local_macs = list( + filter(client_mac_algo_list.__contains__, self.preferred_macs) + ) + agreed_remote_macs = list( + filter(server_mac_algo_list.__contains__, self.preferred_macs) + ) + if (len(agreed_local_macs) == 0) or (len(agreed_remote_macs) == 0): + raise IncompatiblePeer( + "Incompatible ssh server (no acceptable macs)" + ) + self.local_mac = agreed_local_macs[0] + self.remote_mac = agreed_remote_macs[0] + self._log_agreement( + "MAC", local=self.local_mac, remote=self.remote_mac + ) + + if self.server_mode: + agreed_remote_compression = list( + filter( + self.preferred_compression.__contains__, + client_compress_algo_list, + ) + ) + agreed_local_compression = list( + filter( + self.preferred_compression.__contains__, + server_compress_algo_list, + ) + ) + else: + agreed_local_compression = list( + filter( + client_compress_algo_list.__contains__, + self.preferred_compression, + ) + ) + agreed_remote_compression = list( + filter( + server_compress_algo_list.__contains__, + self.preferred_compression, + ) + ) + if ( + len(agreed_local_compression) == 0 + or len(agreed_remote_compression) == 0 + ): + msg = "Incompatible ssh server (no acceptable compression)" + msg += " {!r} {!r} {!r}" + raise IncompatiblePeer( + msg.format( + agreed_local_compression, + agreed_remote_compression, + self.preferred_compression, + ) + ) + self.local_compression = agreed_local_compression[0] + self.remote_compression = agreed_remote_compression[0] + 
self._log_agreement( + "Compression", + local=self.local_compression, + remote=self.remote_compression, + ) + self._log(DEBUG, "=== End of kex handshake ===") + + # save for computing hash later... + # now wait! openssh has a bug (and others might too) where there are + # actually some extra bytes (one NUL byte in openssh's case) added to + # the end of the packet but not parsed. turns out we need to throw + # away those bytes because they aren't part of the hash. + self.remote_kex_init = cMSG_KEXINIT + m.get_so_far() + + def _activate_inbound(self): + """switch on newly negotiated encryption parameters for + inbound traffic""" + info = self._cipher_info[self.remote_cipher] + aead = info.get("is_aead", False) + block_size = info["block-size"] + key_size = info["key-size"] + # Non-AEAD/GCM type ciphers' IV size is their block size. + iv_size = info.get("iv-size", block_size) + if self.server_mode: + iv_in = self._compute_key("A", iv_size) + key_in = self._compute_key("C", key_size) + else: + iv_in = self._compute_key("B", iv_size) + key_in = self._compute_key("D", key_size) + + engine = self._get_engine( + name=self.remote_cipher, + key=key_in, + iv=iv_in, + operation=self._DECRYPT, + aead=aead, + ) + etm = (not aead) and "etm@openssh.com" in self.remote_mac + mac_size = self._mac_info[self.remote_mac]["size"] + mac_engine = self._mac_info[self.remote_mac]["class"] + # initial mac keys are done in the hash's natural size (not the + # potentially truncated transmission size) + if self.server_mode: + mac_key = self._compute_key("E", mac_engine().digest_size) + else: + mac_key = self._compute_key("F", mac_engine().digest_size) + + self.packetizer.set_inbound_cipher( + block_engine=engine, + block_size=block_size, + mac_engine=None if aead else mac_engine, + mac_size=16 if aead else mac_size, + mac_key=None if aead else mac_key, + etm=etm, + aead=aead, + iv_in=iv_in if aead else None, + ) + + compress_in = self._compression_info[self.remote_compression][1] + if compress_in is not None and ( + self.remote_compression != "zlib@openssh.com" or self.authenticated + ): + self._log(DEBUG, "Switching on inbound compression ...") + self.packetizer.set_inbound_compressor(compress_in()) + # Reset inbound sequence number if strict mode. + if self.agreed_on_strict_kex: + self._log( + DEBUG, + "Resetting inbound seqno after NEWKEYS due to strict mode", + ) + self.packetizer.reset_seqno_in() + + def _activate_outbound(self): + """switch on newly negotiated encryption parameters for + outbound traffic""" + m = Message() + m.add_byte(cMSG_NEWKEYS) + self._send_message(m) + # Reset outbound sequence number if strict mode. + if self.agreed_on_strict_kex: + self._log( + DEBUG, + "Resetting outbound seqno after NEWKEYS due to strict mode", + ) + self.packetizer.reset_seqno_out() + info = self._cipher_info[self.local_cipher] + aead = info.get("is_aead", False) + block_size = info["block-size"] + key_size = info["key-size"] + # Non-AEAD/GCM type ciphers' IV size is their block size. 
+ iv_size = info.get("iv-size", block_size) + if self.server_mode: + iv_out = self._compute_key("B", iv_size) + key_out = self._compute_key("D", key_size) + else: + iv_out = self._compute_key("A", iv_size) + key_out = self._compute_key("C", key_size) + + engine = self._get_engine( + name=self.local_cipher, + key=key_out, + iv=iv_out, + operation=self._ENCRYPT, + aead=aead, + ) + etm = (not aead) and "etm@openssh.com" in self.local_mac + mac_size = self._mac_info[self.local_mac]["size"] + mac_engine = self._mac_info[self.local_mac]["class"] + # initial mac keys are done in the hash's natural size (not the + # potentially truncated transmission size) + if self.server_mode: + mac_key = self._compute_key("F", mac_engine().digest_size) + else: + mac_key = self._compute_key("E", mac_engine().digest_size) + sdctr = self.local_cipher.endswith("-ctr") + + self.packetizer.set_outbound_cipher( + block_engine=engine, + block_size=block_size, + mac_engine=None if aead else mac_engine, + mac_size=16 if aead else mac_size, + mac_key=None if aead else mac_key, + sdctr=sdctr, + etm=etm, + aead=aead, + iv_out=iv_out if aead else None, + ) + + compress_out = self._compression_info[self.local_compression][0] + if compress_out is not None and ( + self.local_compression != "zlib@openssh.com" or self.authenticated + ): + self._log(DEBUG, "Switching on outbound compression ...") + self.packetizer.set_outbound_compressor(compress_out()) + if not self.packetizer.need_rekey(): + self.in_kex = False + # If client indicated extension support, send that packet immediately + if ( + self.server_mode + and self.server_sig_algs + and self._remote_ext_info == "ext-info-c" + ): + extensions = {"server-sig-algs": ",".join(self.preferred_pubkeys)} + m = Message() + m.add_byte(cMSG_EXT_INFO) + m.add_int(len(extensions)) + for name, value in sorted(extensions.items()): + m.add_string(name) + m.add_string(value) + self._send_message(m) + # we always expect to receive NEWKEYS now + self._expect_packet(MSG_NEWKEYS) + + def _auth_trigger(self): + self.authenticated = True + # delayed initiation of compression + if self.local_compression == "zlib@openssh.com": + compress_out = self._compression_info[self.local_compression][0] + self._log(DEBUG, "Switching on outbound compression ...") + self.packetizer.set_outbound_compressor(compress_out()) + if self.remote_compression == "zlib@openssh.com": + compress_in = self._compression_info[self.remote_compression][1] + self._log(DEBUG, "Switching on inbound compression ...") + self.packetizer.set_inbound_compressor(compress_in()) + + def _parse_ext_info(self, msg): + # Packet is a count followed by that many key-string to possibly-bytes + # pairs. + extensions = {} + for _ in range(msg.get_int()): + name = msg.get_text() + value = msg.get_string() + extensions[name] = value + self._log(DEBUG, "Got EXT_INFO: {}".format(extensions)) + # NOTE: this should work ok in cases where a server sends /two/ such + # messages; the RFC explicitly states a 2nd one should overwrite the + # 1st. 
+ self.server_extensions = extensions + + def _parse_newkeys(self, m): + self._log(DEBUG, "Switch to new keys ...") + self._activate_inbound() + # can also free a bunch of stuff here + self.local_kex_init = self.remote_kex_init = None + self.K = None + self.kex_engine = None + if self.server_mode and (self.auth_handler is None): + # create auth handler for server mode + self.auth_handler = AuthHandler(self) + if not self.initial_kex_done: + # this was the first key exchange + # (also signal to packetizer as it sometimes wants to know this + # status as well, eg when seqnos rollover) + self.initial_kex_done = self.packetizer._initial_kex_done = True + # send an event? + if self.completion_event is not None: + self.completion_event.set() + # it's now okay to send data again (if this was a re-key) + if not self.packetizer.need_rekey(): + self.in_kex = False + self.clear_to_send_lock.acquire() + try: + self.clear_to_send.set() + finally: + self.clear_to_send_lock.release() + return + + def _parse_disconnect(self, m): + code = m.get_int() + desc = m.get_text() + self._log(INFO, "Disconnect (code {:d}): {}".format(code, desc)) + + def _parse_global_request(self, m): + kind = m.get_text() + self._log(DEBUG, 'Received global request "{}"'.format(kind)) + want_reply = m.get_boolean() + if not self.server_mode: + self._log( + DEBUG, + 'Rejecting "{}" global request from server.'.format(kind), + ) + ok = False + elif kind == "tcpip-forward": + address = m.get_text() + port = m.get_int() + ok = self.server_object.check_port_forward_request(address, port) + if ok: + ok = (ok,) + elif kind == "cancel-tcpip-forward": + address = m.get_text() + port = m.get_int() + self.server_object.cancel_port_forward_request(address, port) + ok = True + else: + ok = self.server_object.check_global_request(kind, m) + extra = () + if type(ok) is tuple: + extra = ok + ok = True + if want_reply: + msg = Message() + if ok: + msg.add_byte(cMSG_REQUEST_SUCCESS) + msg.add(*extra) + else: + msg.add_byte(cMSG_REQUEST_FAILURE) + self._send_message(msg) + + def _parse_request_success(self, m): + self._log(DEBUG, "Global request successful.") + self.global_response = m + if self.completion_event is not None: + self.completion_event.set() + + def _parse_request_failure(self, m): + self._log(DEBUG, "Global request denied.") + self.global_response = None + if self.completion_event is not None: + self.completion_event.set() + + def _parse_channel_open_success(self, m): + chanid = m.get_int() + server_chanid = m.get_int() + server_window_size = m.get_int() + server_max_packet_size = m.get_int() + chan = self._channels.get(chanid) + if chan is None: + self._log(WARNING, "Success for unrequested channel! 
[??]") + return + self.lock.acquire() + try: + chan._set_remote_channel( + server_chanid, server_window_size, server_max_packet_size + ) + self._log(DEBUG, "Secsh channel {:d} opened.".format(chanid)) + if chanid in self.channel_events: + self.channel_events[chanid].set() + del self.channel_events[chanid] + finally: + self.lock.release() + return + + def _parse_channel_open_failure(self, m): + chanid = m.get_int() + reason = m.get_int() + reason_str = m.get_text() + m.get_text() # ignored language + reason_text = CONNECTION_FAILED_CODE.get(reason, "(unknown code)") + self._log( + ERROR, + "Secsh channel {:d} open FAILED: {}: {}".format( + chanid, reason_str, reason_text + ), + ) + self.lock.acquire() + try: + self.saved_exception = ChannelException(reason, reason_text) + if chanid in self.channel_events: + self._channels.delete(chanid) + if chanid in self.channel_events: + self.channel_events[chanid].set() + del self.channel_events[chanid] + finally: + self.lock.release() + return + + def _parse_channel_open(self, m): + kind = m.get_text() + chanid = m.get_int() + initial_window_size = m.get_int() + max_packet_size = m.get_int() + reject = False + if ( + kind == "auth-agent@openssh.com" + and self._forward_agent_handler is not None + ): + self._log(DEBUG, "Incoming forward agent connection") + self.lock.acquire() + try: + my_chanid = self._next_channel() + finally: + self.lock.release() + elif (kind == "x11") and (self._x11_handler is not None): + origin_addr = m.get_text() + origin_port = m.get_int() + self._log( + DEBUG, + "Incoming x11 connection from {}:{:d}".format( + origin_addr, origin_port + ), + ) + self.lock.acquire() + try: + my_chanid = self._next_channel() + finally: + self.lock.release() + elif (kind == "forwarded-tcpip") and (self._tcp_handler is not None): + server_addr = m.get_text() + server_port = m.get_int() + origin_addr = m.get_text() + origin_port = m.get_int() + self._log( + DEBUG, + "Incoming tcp forwarded connection from {}:{:d}".format( + origin_addr, origin_port + ), + ) + self.lock.acquire() + try: + my_chanid = self._next_channel() + finally: + self.lock.release() + elif not self.server_mode: + self._log( + DEBUG, + 'Rejecting "{}" channel request from server.'.format(kind), + ) + reject = True + reason = OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED + else: + self.lock.acquire() + try: + my_chanid = self._next_channel() + finally: + self.lock.release() + if kind == "direct-tcpip": + # handle direct-tcpip requests coming from the client + dest_addr = m.get_text() + dest_port = m.get_int() + origin_addr = m.get_text() + origin_port = m.get_int() + reason = self.server_object.check_channel_direct_tcpip_request( + my_chanid, + (origin_addr, origin_port), + (dest_addr, dest_port), + ) + else: + reason = self.server_object.check_channel_request( + kind, my_chanid + ) + if reason != OPEN_SUCCEEDED: + self._log( + DEBUG, + 'Rejecting "{}" channel request from client.'.format(kind), + ) + reject = True + if reject: + msg = Message() + msg.add_byte(cMSG_CHANNEL_OPEN_FAILURE) + msg.add_int(chanid) + msg.add_int(reason) + msg.add_string("") + msg.add_string("en") + self._send_message(msg) + return + + chan = Channel(my_chanid) + self.lock.acquire() + try: + self._channels.put(my_chanid, chan) + self.channels_seen[my_chanid] = True + chan._set_transport(self) + chan._set_window( + self.default_window_size, self.default_max_packet_size + ) + chan._set_remote_channel( + chanid, initial_window_size, max_packet_size + ) + finally: + self.lock.release() + m = Message() + 
m.add_byte(cMSG_CHANNEL_OPEN_SUCCESS) + m.add_int(chanid) + m.add_int(my_chanid) + m.add_int(self.default_window_size) + m.add_int(self.default_max_packet_size) + self._send_message(m) + self._log( + DEBUG, "Secsh channel {:d} ({}) opened.".format(my_chanid, kind) + ) + if kind == "auth-agent@openssh.com": + self._forward_agent_handler(chan) + elif kind == "x11": + self._x11_handler(chan, (origin_addr, origin_port)) + elif kind == "forwarded-tcpip": + chan.origin_addr = (origin_addr, origin_port) + self._tcp_handler( + chan, (origin_addr, origin_port), (server_addr, server_port) + ) + else: + self._queue_incoming_channel(chan) + + def _parse_debug(self, m): + m.get_boolean() # always_display + msg = m.get_string() + m.get_string() # language + self._log(DEBUG, "Debug msg: {}".format(util.safe_string(msg))) + + def _get_subsystem_handler(self, name): + try: + self.lock.acquire() + if name not in self.subsystem_table: + return None, [], {} + return self.subsystem_table[name] + finally: + self.lock.release() + + _channel_handler_table = { + MSG_CHANNEL_SUCCESS: Channel._request_success, + MSG_CHANNEL_FAILURE: Channel._request_failed, + MSG_CHANNEL_DATA: Channel._feed, + MSG_CHANNEL_EXTENDED_DATA: Channel._feed_extended, + MSG_CHANNEL_WINDOW_ADJUST: Channel._window_adjust, + MSG_CHANNEL_REQUEST: Channel._handle_request, + MSG_CHANNEL_EOF: Channel._handle_eof, + MSG_CHANNEL_CLOSE: Channel._handle_close, + } + + +# TODO 4.0: drop this, we barely use it ourselves, it badly replicates the +# Transport-internal algorithm management, AND does so in a way which doesn't +# honor newer things like disabled_algorithms! +class SecurityOptions: + """ + Simple object containing the security preferences of an ssh transport. + These are tuples of acceptable ciphers, digests, key types, and key + exchange algorithms, listed in order of preference. + + Changing the contents and/or order of these fields affects the underlying + `.Transport` (but only if you change them before starting the session). + If you try to add an algorithm that paramiko doesn't recognize, + ``ValueError`` will be raised. If you try to assign something besides a + tuple to one of the fields, ``TypeError`` will be raised. + """ + + __slots__ = "_transport" + + def __init__(self, transport): + self._transport = transport + + def __repr__(self): + """ + Returns a string representation of this object, for debugging. 
+ """ + return "".format(self._transport) + + def _set(self, name, orig, x): + if type(x) is list: + x = tuple(x) + if type(x) is not tuple: + raise TypeError("expected tuple or list") + possible = list(getattr(self._transport, orig).keys()) + forbidden = [n for n in x if n not in possible] + if len(forbidden) > 0: + raise ValueError("unknown cipher") + setattr(self._transport, name, x) + + @property + def ciphers(self): + """Symmetric encryption ciphers""" + return self._transport._preferred_ciphers + + @ciphers.setter + def ciphers(self, x): + self._set("_preferred_ciphers", "_cipher_info", x) + + @property + def digests(self): + """Digest (one-way hash) algorithms""" + return self._transport._preferred_macs + + @digests.setter + def digests(self, x): + self._set("_preferred_macs", "_mac_info", x) + + @property + def key_types(self): + """Public-key algorithms""" + return self._transport._preferred_keys + + @key_types.setter + def key_types(self, x): + self._set("_preferred_keys", "_key_info", x) + + @property + def kex(self): + """Key exchange algorithms""" + return self._transport._preferred_kex + + @kex.setter + def kex(self, x): + self._set("_preferred_kex", "_kex_info", x) + + @property + def compression(self): + """Compression algorithms""" + return self._transport._preferred_compression + + @compression.setter + def compression(self, x): + self._set("_preferred_compression", "_compression_info", x) + + +class ChannelMap: + def __init__(self): + # (id -> Channel) + self._map = weakref.WeakValueDictionary() + self._lock = threading.Lock() + + def put(self, chanid, chan): + self._lock.acquire() + try: + self._map[chanid] = chan + finally: + self._lock.release() + + def get(self, chanid): + self._lock.acquire() + try: + return self._map.get(chanid, None) + finally: + self._lock.release() + + def delete(self, chanid): + self._lock.acquire() + try: + try: + del self._map[chanid] + except KeyError: + pass + finally: + self._lock.release() + + def values(self): + self._lock.acquire() + try: + return list(self._map.values()) + finally: + self._lock.release() + + def __len__(self): + self._lock.acquire() + try: + return len(self._map) + finally: + self._lock.release() + + +class ServiceRequestingTransport(Transport): + """ + Transport, but also handling service requests, like it oughtta! + + .. versionadded:: 3.2 + """ + + # NOTE: this purposefully duplicates some of the parent class in order to + # modernize, refactor, etc. The intent is that eventually we will collapse + # this one onto the parent in a backwards incompatible release. + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._service_userauth_accepted = False + self._handler_table[MSG_SERVICE_ACCEPT] = self._parse_service_accept + + def _parse_service_accept(self, m): + service = m.get_text() + # Short-circuit for any service name not ssh-userauth. + # NOTE: it's technically possible for 'service name' in + # SERVICE_REQUEST/ACCEPT messages to be "ssh-connection" -- + # but I don't see evidence of Paramiko ever initiating or expecting to + # receive one of these. We /do/ see the 'service name' field in + # MSG_USERAUTH_REQUEST/ACCEPT/FAILURE set to this string, but that is a + # different set of handlers, so...! + if service != "ssh-userauth": + # TODO 4.0: consider erroring here (with an ability to opt out?) + # instead as it probably means something went Very Wrong. 
+ self._log( + DEBUG, 'Service request "{}" accepted (?)'.format(service) + ) + return + # Record that we saw a service-userauth acceptance, meaning we are free + # to submit auth requests. + self._service_userauth_accepted = True + self._log(DEBUG, "MSG_SERVICE_ACCEPT received; auth may begin") + + def ensure_session(self): + # Make sure we're not trying to auth on a not-yet-open or + # already-closed transport session; that's our responsibility, not that + # of AuthHandler. + if (not self.active) or (not self.initial_kex_done): + # TODO: better error message? this can happen in many places, eg + # user error (authing before connecting) or developer error (some + # improperly handled pre/mid auth shutdown didn't become fatal + # enough). The latter is much more common & should ideally be fixed + # by terminating things harder? + raise SSHException("No existing session") + # Also make sure we've actually been told we are allowed to auth. + if self._service_userauth_accepted: + return + # Or request to do so, otherwise. + m = Message() + m.add_byte(cMSG_SERVICE_REQUEST) + m.add_string("ssh-userauth") + self._log(DEBUG, "Sending MSG_SERVICE_REQUEST: ssh-userauth") + self._send_message(m) + # Now we wait to hear back; the user is expecting a blocking-style auth + # request so there's no point giving control back anywhere. + while not self._service_userauth_accepted: + # TODO: feels like we're missing an AuthHandler Event like + # 'self.auth_event' which is set when AuthHandler shuts down in + # ways good AND bad. Transport only seems to have completion_event + # which is unclear re: intent, eg it's set by newkeys which always + # happens on connection, so it'll always be set by the time we get + # here. + # NOTE: this copies the timing of event.wait() in + # AuthHandler.wait_for_response, re: 1/10 of a second. Could + # presumably be smaller, but seems unlikely this period is going to + # be "too long" for any code doing ssh networking... + time.sleep(0.1) + self.auth_handler = self.get_auth_handler() + + def get_auth_handler(self): + # NOTE: using new sibling subclass instead of classic AuthHandler + return AuthOnlyHandler(self) + + def auth_none(self, username): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + return self.auth_handler.auth_none(username) + + def auth_password(self, username, password, fallback=True): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + try: + return self.auth_handler.auth_password(username, password) + except BadAuthenticationType as e: + # if password auth isn't allowed, but keyboard-interactive *is*, + # try to fudge it + if not fallback or ("keyboard-interactive" not in e.allowed_types): + raise + try: + + def handler(title, instructions, fields): + if len(fields) > 1: + raise SSHException("Fallback authentication failed.") + if len(fields) == 0: + # for some reason, at least on os x, a 2nd request will + # be made with zero fields requested. maybe it's just + # to try to fake out automated scripting of the exact + # type we're doing here. 
*shrug* :) + return [] + return [password] + + return self.auth_interactive(username, handler) + except SSHException: + # attempt to fudge failed; just raise the original exception + raise e + + def auth_publickey(self, username, key): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + return self.auth_handler.auth_publickey(username, key) + + def auth_interactive(self, username, handler, submethods=""): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + return self.auth_handler.auth_interactive( + username, handler, submethods + ) + + def auth_interactive_dumb(self, username, handler=None, submethods=""): + # TODO 4.0: merge to parent, preserving (most of) docstring + # NOTE: legacy impl omitted equiv of ensure_session since it just wraps + # another call to an auth method. however we reinstate it for + # consistency reasons. + self.ensure_session() + if not handler: + + def handler(title, instructions, prompt_list): + answers = [] + if title: + print(title.strip()) + if instructions: + print(instructions.strip()) + for prompt, show_input in prompt_list: + print(prompt.strip(), end=" ") + answers.append(input()) + return answers + + return self.auth_interactive(username, handler, submethods) + + def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + self.auth_handler = self.get_auth_handler() + return self.auth_handler.auth_gssapi_with_mic( + username, gss_host, gss_deleg_creds + ) + + def auth_gssapi_keyex(self, username): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + self.auth_handler = self.get_auth_handler() + return self.auth_handler.auth_gssapi_keyex(username) diff --git a/.venv/lib/python3.9/site-packages/paramiko/util.py b/.venv/lib/python3.9/site-packages/paramiko/util.py new file mode 100644 index 0000000..c23e498 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/util.py @@ -0,0 +1,336 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Useful functions used by the rest of paramiko. 
+""" + + +import sys +import struct +import traceback +import threading +import logging + +from paramiko.common import ( + DEBUG, + zero_byte, + xffffffff, + max_byte, + byte_ord, + byte_chr, +) +from paramiko.config import SSHConfig + + +def inflate_long(s, always_positive=False): + """turns a normalized byte string into a long-int + (adapted from Crypto.Util.number)""" + out = 0 + negative = 0 + if not always_positive and (len(s) > 0) and (byte_ord(s[0]) >= 0x80): + negative = 1 + if len(s) % 4: + filler = zero_byte + if negative: + filler = max_byte + # never convert this to ``s +=`` because this is a string, not a number + # noinspection PyAugmentAssignment + s = filler * (4 - len(s) % 4) + s + for i in range(0, len(s), 4): + out = (out << 32) + struct.unpack(">I", s[i : i + 4])[0] + if negative: + out -= 1 << (8 * len(s)) + return out + + +def deflate_long(n, add_sign_padding=True): + """turns a long-int into a normalized byte string + (adapted from Crypto.Util.number)""" + # after much testing, this algorithm was deemed to be the fastest + s = bytes() + n = int(n) + while (n != 0) and (n != -1): + s = struct.pack(">I", n & xffffffff) + s + n >>= 32 + # strip off leading zeros, FFs + for i in enumerate(s): + if (n == 0) and (i[1] != 0): + break + if (n == -1) and (i[1] != 0xFF): + break + else: + # degenerate case, n was either 0 or -1 + i = (0,) + if n == 0: + s = zero_byte + else: + s = max_byte + s = s[i[0] :] + if add_sign_padding: + if (n == 0) and (byte_ord(s[0]) >= 0x80): + s = zero_byte + s + if (n == -1) and (byte_ord(s[0]) < 0x80): + s = max_byte + s + return s + + +def format_binary(data, prefix=""): + x = 0 + out = [] + while len(data) > x + 16: + out.append(format_binary_line(data[x : x + 16])) + x += 16 + if x < len(data): + out.append(format_binary_line(data[x:])) + return [prefix + line for line in out] + + +def format_binary_line(data): + left = " ".join(["{:02X}".format(byte_ord(c)) for c in data]) + right = "".join( + [".{:c}..".format(byte_ord(c))[(byte_ord(c) + 63) // 95] for c in data] + ) + return "{:50s} {}".format(left, right) + + +def safe_string(s): + out = b"" + for c in s: + i = byte_ord(c) + if 32 <= i <= 127: + out += byte_chr(i) + else: + out += b("%{:02X}".format(i)) + return out + + +def bit_length(n): + try: + return n.bit_length() + except AttributeError: + norm = deflate_long(n, False) + hbyte = byte_ord(norm[0]) + if hbyte == 0: + return 1 + bitlen = len(norm) * 8 + while not (hbyte & 0x80): + hbyte <<= 1 + bitlen -= 1 + return bitlen + + +def tb_strings(): + return "".join(traceback.format_exception(*sys.exc_info())).split("\n") + + +def generate_key_bytes(hash_alg, salt, key, nbytes): + """ + Given a password, passphrase, or other human-source key, scramble it + through a secure hash into some keyworthy bytes. This specific algorithm + is used for encrypting/decrypting private key files. + + :param function hash_alg: A function which creates a new hash object, such + as ``hashlib.sha256``. + :param salt: data to salt the hash with. + :type bytes salt: Hash salt bytes. + :param str key: human-entered password or passphrase. + :param int nbytes: number of bytes to generate. + :return: Key data, as `bytes`. 
+ """ + keydata = bytes() + digest = bytes() + if len(salt) > 8: + salt = salt[:8] + while nbytes > 0: + hash_obj = hash_alg() + if len(digest) > 0: + hash_obj.update(digest) + hash_obj.update(b(key)) + hash_obj.update(salt) + digest = hash_obj.digest() + size = min(nbytes, len(digest)) + keydata += digest[:size] + nbytes -= size + return keydata + + +def load_host_keys(filename): + """ + Read a file of known SSH host keys, in the format used by openssh, and + return a compound dict of ``hostname -> keytype ->`` `PKey + `. The hostname may be an IP address or DNS name. + + This type of file unfortunately doesn't exist on Windows, but on posix, + it will usually be stored in ``os.path.expanduser("~/.ssh/known_hosts")``. + + Since 1.5.3, this is just a wrapper around `.HostKeys`. + + :param str filename: name of the file to read host keys from + :return: + nested dict of `.PKey` objects, indexed by hostname and then keytype + """ + from paramiko.hostkeys import HostKeys + + return HostKeys(filename) + + +def parse_ssh_config(file_obj): + """ + Provided only as a backward-compatible wrapper around `.SSHConfig`. + + .. deprecated:: 2.7 + Use `SSHConfig.from_file` instead. + """ + config = SSHConfig() + config.parse(file_obj) + return config + + +def lookup_ssh_host_config(hostname, config): + """ + Provided only as a backward-compatible wrapper around `.SSHConfig`. + """ + return config.lookup(hostname) + + +def mod_inverse(x, m): + # it's crazy how small Python can make this function. + u1, u2, u3 = 1, 0, m + v1, v2, v3 = 0, 1, x + + while v3 > 0: + q = u3 // v3 + u1, v1 = v1, u1 - v1 * q + u2, v2 = v2, u2 - v2 * q + u3, v3 = v3, u3 - v3 * q + if u2 < 0: + u2 += m + return u2 + + +_g_thread_data = threading.local() +_g_thread_counter = 0 +_g_thread_lock = threading.Lock() + + +def get_thread_id(): + global _g_thread_data, _g_thread_counter, _g_thread_lock # noqa + try: + return _g_thread_data.id + except AttributeError: + with _g_thread_lock: + _g_thread_counter += 1 + _g_thread_data.id = _g_thread_counter + return _g_thread_data.id + + +def log_to_file(filename, level=DEBUG): + """send paramiko logs to a logfile, + if they're not already going somewhere""" + logger = logging.getLogger("paramiko") + if len(logger.handlers) > 0: + return + logger.setLevel(level) + f = open(filename, "a") + handler = logging.StreamHandler(f) + frm = "%(levelname)-.3s [%(asctime)s.%(msecs)03d] thr=%(_threadid)-3d" + frm += " %(name)s: %(message)s" + handler.setFormatter(logging.Formatter(frm, "%Y%m%d-%H:%M:%S")) + logger.addHandler(handler) + + +# make only one filter object, so it doesn't get applied more than once +class PFilter: + def filter(self, record): + record._threadid = get_thread_id() + return True + + +_pfilter = PFilter() + + +def get_logger(name): + logger = logging.getLogger(name) + logger.addFilter(_pfilter) + return logger + + +def constant_time_bytes_eq(a, b): + if len(a) != len(b): + return False + res = 0 + # noinspection PyUnresolvedReferences + for i in range(len(a)): # noqa: F821 + res |= byte_ord(a[i]) ^ byte_ord(b[i]) + return res == 0 + + +class ClosingContextManager: + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +def clamp_value(minimum, val, maximum): + return max(minimum, min(val, maximum)) + + +def asbytes(s): + """ + Coerce to bytes if possible or return unchanged. 
+ """ + try: + # Attempt to run through our version of b(), which does the Right Thing + # for unicode strings vs bytestrings, and raises TypeError if it's not + # one of those types. + return b(s) + except TypeError: + try: + # If it wasn't a string/byte/buffer-ish object, try calling an + # asbytes() method, which many of our internal classes implement. + return s.asbytes() + except AttributeError: + # Finally, just do nothing & assume this object is sufficiently + # byte-y or buffer-y that everything will work out (or that callers + # are capable of handling whatever it is.) + return s + + +# TODO: clean this up / force callers to assume bytes OR unicode +def b(s, encoding="utf8"): + """cast unicode or bytes to bytes""" + if isinstance(s, bytes): + return s + elif isinstance(s, str): + return s.encode(encoding) + else: + raise TypeError(f"Expected unicode or bytes, got {type(s)}") + + +# TODO: clean this up / force callers to assume bytes OR unicode +def u(s, encoding="utf8"): + """cast bytes or unicode to unicode""" + if isinstance(s, bytes): + return s.decode(encoding) + elif isinstance(s, str): + return s + else: + raise TypeError(f"Expected unicode or bytes, got {type(s)}") diff --git a/.venv/lib/python3.9/site-packages/paramiko/win_openssh.py b/.venv/lib/python3.9/site-packages/paramiko/win_openssh.py new file mode 100644 index 0000000..614b589 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/win_openssh.py @@ -0,0 +1,56 @@ +# Copyright (C) 2021 Lew Gordon +# Copyright (C) 2022 Patrick Spendrin +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os.path +import time + +PIPE_NAME = r"\\.\pipe\openssh-ssh-agent" + + +def can_talk_to_agent(): + # use os.listdir() instead of os.path.exists(), because os.path.exists() + # uses CreateFileW() API and the pipe cannot be reopen unless the server + # calls DisconnectNamedPipe(). + dir_, name = os.path.split(PIPE_NAME) + name = name.lower() + return any(name == n.lower() for n in os.listdir(dir_)) + + +class OpenSSHAgentConnection: + def __init__(self): + while True: + try: + self._pipe = os.open(PIPE_NAME, os.O_RDWR | os.O_BINARY) + except OSError as e: + # retry when errno 22 which means that the server has not + # called DisconnectNamedPipe() yet. 
+ if e.errno != 22: + raise + else: + break + time.sleep(0.1) + + def send(self, data): + return os.write(self._pipe, data) + + def recv(self, n): + return os.read(self._pipe, n) + + def close(self): + return os.close(self._pipe) diff --git a/.venv/lib/python3.9/site-packages/paramiko/win_pageant.py b/.venv/lib/python3.9/site-packages/paramiko/win_pageant.py new file mode 100644 index 0000000..c927de6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/paramiko/win_pageant.py @@ -0,0 +1,138 @@ +# Copyright (C) 2005 John Arbash-Meinel +# Modified up by: Todd Whiteman +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Functions for communicating with Pageant, the basic windows ssh agent program. +""" + +import array +import ctypes.wintypes +import platform +import struct +from paramiko.common import zero_byte +from paramiko.util import b + +import _thread as thread + +from . import _winapi + + +_AGENT_COPYDATA_ID = 0x804E50BA +_AGENT_MAX_MSGLEN = 8192 +# Note: The WM_COPYDATA value is pulled from win32con, as a workaround +# so we do not have to import this huge library just for this one variable. +win32con_WM_COPYDATA = 74 + + +def _get_pageant_window_object(): + return ctypes.windll.user32.FindWindowA(b"Pageant", b"Pageant") + + +def can_talk_to_agent(): + """ + Check to see if there is a "Pageant" agent we can talk to. + + This checks both if we have the required libraries (win32all or ctypes) + and if there is a Pageant currently running. + """ + return bool(_get_pageant_window_object()) + + +if platform.architecture()[0] == "64bit": + ULONG_PTR = ctypes.c_uint64 +else: + ULONG_PTR = ctypes.c_uint32 + + +class COPYDATASTRUCT(ctypes.Structure): + """ + ctypes implementation of + http://msdn.microsoft.com/en-us/library/windows/desktop/ms649010%28v=vs.85%29.aspx + """ + + _fields_ = [ + ("num_data", ULONG_PTR), + ("data_size", ctypes.wintypes.DWORD), + ("data_loc", ctypes.c_void_p), + ] + + +def _query_pageant(msg): + """ + Communication with the Pageant process is done through a shared + memory-mapped file. + """ + hwnd = _get_pageant_window_object() + if not hwnd: + # Raise a failure to connect exception, pageant isn't running anymore! 
+ return None + + # create a name for the mmap + map_name = f"PageantRequest{thread.get_ident():08x}" + + pymap = _winapi.MemoryMap( + map_name, _AGENT_MAX_MSGLEN, _winapi.get_security_attributes_for_user() + ) + with pymap: + pymap.write(msg) + # Create an array buffer containing the mapped filename + char_buffer = array.array("b", b(map_name) + zero_byte) # noqa + char_buffer_address, char_buffer_size = char_buffer.buffer_info() + # Create a string to use for the SendMessage function call + cds = COPYDATASTRUCT( + _AGENT_COPYDATA_ID, char_buffer_size, char_buffer_address + ) + + response = ctypes.windll.user32.SendMessageA( + hwnd, win32con_WM_COPYDATA, ctypes.sizeof(cds), ctypes.byref(cds) + ) + + if response > 0: + pymap.seek(0) + datalen = pymap.read(4) + retlen = struct.unpack(">I", datalen)[0] + return datalen + pymap.read(retlen) + return None + + +class PageantConnection: + """ + Mock "connection" to an agent which roughly approximates the behavior of + a unix local-domain socket (as used by Agent). Requests are sent to the + pageant daemon via special Windows magick, and responses are buffered back + for subsequent reads. + """ + + def __init__(self): + self._response = None + + def send(self, data): + self._response = _query_pageant(data) + + def recv(self, n): + if self._response is None: + return "" + ret = self._response[:n] + self._response = self._response[n:] + if self._response == "": + self._response = None + return ret + + def close(self): + pass diff --git a/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/INSTALLER b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/LICENSE.txt b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/LICENSE.txt new file mode 100644 index 0000000..00addc2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2008-2021 The pip developers (see AUTHORS.txt file) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/METADATA b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/METADATA new file mode 100644 index 0000000..9d031ed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/METADATA @@ -0,0 +1,93 @@ +Metadata-Version: 2.1 +Name: pip +Version: 21.3.1 +Summary: The PyPA recommended tool for installing Python packages. +Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: distutils-sig@python.org +License: MIT +Project-URL: Documentation, https://pip.pypa.io +Project-URL: Source, https://github.com/pypa/pip +Project-URL: Changelog, https://pip.pypa.io/en/stable/news/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.6 +License-File: LICENSE.txt + +pip - The Python Package Installer +================================== + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.org/project/pip/ + +.. image:: https://readthedocs.org/projects/pip/badge/?version=latest + :target: https://pip.pypa.io/en/latest + +pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. + +Please take a look at our documentation for how to install and use pip: + +* `Installation`_ +* `Usage`_ + +We release updates regularly, with a new version every 3 months. Find more details in our documentation: + +* `Release notes`_ +* `Release process`_ + +In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right. + +**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3. + +If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: + +* `Issue tracking`_ +* `Discourse channel`_ +* `User IRC`_ + +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: + +* `GitHub page`_ +* `Development documentation`_ +* `Development mailing list`_ +* `Development IRC`_ + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _package installer: https://packaging.python.org/guides/tool-recommendations/ +.. _Python Package Index: https://pypi.org +.. _Installation: https://pip.pypa.io/en/stable/installation/ +.. _Usage: https://pip.pypa.io/en/stable/ +.. _Release notes: https://pip.pypa.io/en/stable/news.html +.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ +.. 
_GitHub page: https://github.com/pypa/pip +.. _Development documentation: https://pip.pypa.io/en/latest/development +.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html +.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020 +.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html +.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support +.. _Issue tracking: https://github.com/pypa/pip/issues +.. _Discourse channel: https://discuss.python.org/c/packaging +.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/ +.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa +.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + + diff --git a/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/RECORD b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/RECORD new file mode 100644 index 0000000..c157d65 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/RECORD @@ -0,0 +1,812 @@ +../../../bin/pip,sha256=hhg_DHabOzHzMrf9y2dDStlckoA8DKezcbLqnWQSuxI,262 +../../../bin/pip3,sha256=hhg_DHabOzHzMrf9y2dDStlckoA8DKezcbLqnWQSuxI,262 +../../../bin/pip3.9,sha256=hhg_DHabOzHzMrf9y2dDStlckoA8DKezcbLqnWQSuxI,262 +pip-21.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-21.3.1.dist-info/LICENSE.txt,sha256=I6c2HCsVgQKLxiO52ivSSZeryqR4Gs5q1ESjeUT42uE,1090 +pip-21.3.1.dist-info/METADATA,sha256=PjWcvFEqJd4gOfiQam8il34_wPNKxf8ubyYI2wYm7tc,4216 +pip-21.3.1.dist-info/RECORD,, +pip-21.3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip-21.3.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +pip-21.3.1.dist-info/entry_points.txt,sha256=5ExSa1s54zSPNA_1epJn5SX06786S8k5YHwskMvVYzw,125 +pip-21.3.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/__init__.py,sha256=798yhPIf6eMHi7R5Ogb3BJ5ALJ0Id8IwEuOSU2DFlp0,357 +pip/__main__.py,sha256=mXwWDftNLMKfwVqKFWGE_uuBZvGSIiUELhLkeysIuZc,1198 +pip/__pycache__/__init__.cpython-39.pyc,, +pip/__pycache__/__main__.cpython-39.pyc,, +pip/_internal/__init__.py,sha256=FU88-ODK1jGlVF2Qxp1gJqTwg0wujj12LayXQgFhlh8,587 +pip/_internal/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/__pycache__/build_env.cpython-39.pyc,, +pip/_internal/__pycache__/cache.cpython-39.pyc,, +pip/_internal/__pycache__/configuration.cpython-39.pyc,, +pip/_internal/__pycache__/exceptions.cpython-39.pyc,, +pip/_internal/__pycache__/main.cpython-39.pyc,, +pip/_internal/__pycache__/pyproject.cpython-39.pyc,, +pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc,, +pip/_internal/__pycache__/wheel_builder.cpython-39.pyc,, +pip/_internal/build_env.py,sha256=uIg4HJDgZK542FXVTl3jkPDNbklNgb8Rj6DeZef_oS8,9950 +pip/_internal/cache.py,sha256=71eaYwrls34HJ6gzbmmYiotiKhPNFTM_tqYJXD5nf3s,9441 +pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 +pip/_internal/cli/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc,, +pip/_internal/cli/__pycache__/base_command.cpython-39.pyc,, +pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc,, 
+pip/_internal/cli/__pycache__/command_context.cpython-39.pyc,, +pip/_internal/cli/__pycache__/main.cpython-39.pyc,, +pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc,, +pip/_internal/cli/__pycache__/parser.cpython-39.pyc,, +pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc,, +pip/_internal/cli/__pycache__/req_command.cpython-39.pyc,, +pip/_internal/cli/__pycache__/spinners.cpython-39.pyc,, +pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc,, +pip/_internal/cli/autocompletion.py,sha256=NK5yqe49SgExZOCFVEUT5Bf0QV2CuITGK27WSo2MWg8,6399 +pip/_internal/cli/base_command.py,sha256=oFuvjLsYE17V67L1dHeTo-YePZN97RKpOuGEXwCKwLc,7790 +pip/_internal/cli/cmdoptions.py,sha256=o6hueHSc3VWZ-_do9eeoZKEaxqh18zlXKAzVZ00Kg-o,28391 +pip/_internal/cli/command_context.py,sha256=a1pBBvvGLDiZ1Kw64_4tT6HmRTwYDoYy8JFgG5Czn7s,760 +pip/_internal/cli/main.py,sha256=ioJ8IVlb2K1qLOxR-tXkee9lURhYV89CDM71MKag7YY,2472 +pip/_internal/cli/main_parser.py,sha256=Q9TnytfuC5Z2JSjBFWVGtEdYLFy7rukNIb04movHdAo,2614 +pip/_internal/cli/parser.py,sha256=CDXTuFr2UD8ozOlZYf1KDziQdo9-X_IaYOiUcyJQwrA,10788 +pip/_internal/cli/progress_bars.py,sha256=ha8wowclY8_PaoM0cz4G6qK37zjnzuxQ-ydOtzx4EMI,8300 +pip/_internal/cli/req_command.py,sha256=La6J8YonTxoPtJ8HMPN4RTKyzg0VS_R4vxfVf_HmFZw,17097 +pip/_internal/cli/spinners.py,sha256=TFhjxtOnLeNJ5YmRvQm4eKPgPbJNkZiqO8jOXuxRaYU,5076 +pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 +pip/_internal/commands/__init__.py,sha256=Vc1HjsLEtyCh7506OozPHPKXe2Hk-z9cFkFF3BMj1lM,3736 +pip/_internal/commands/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/commands/__pycache__/cache.cpython-39.pyc,, +pip/_internal/commands/__pycache__/check.cpython-39.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-39.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-39.pyc,, +pip/_internal/commands/__pycache__/debug.cpython-39.pyc,, +pip/_internal/commands/__pycache__/download.cpython-39.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-39.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-39.pyc,, +pip/_internal/commands/__pycache__/help.cpython-39.pyc,, +pip/_internal/commands/__pycache__/index.cpython-39.pyc,, +pip/_internal/commands/__pycache__/install.cpython-39.pyc,, +pip/_internal/commands/__pycache__/list.cpython-39.pyc,, +pip/_internal/commands/__pycache__/search.cpython-39.pyc,, +pip/_internal/commands/__pycache__/show.cpython-39.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-39.pyc,, +pip/_internal/commands/cache.py,sha256=p9gvc6W_xgxE2zO0o8NXqO1gGJEinEK42qEC-a7Cnuk,7524 +pip/_internal/commands/check.py,sha256=0gjXR7j36xJT5cs2heYU_dfOfpnFfzX8OoPNNoKhqdM,1685 +pip/_internal/commands/completion.py,sha256=kTG_I1VR3N5kGC4Ma9pQTSoY9Q1URCrNyseHSQ-rCL4,2958 +pip/_internal/commands/configuration.py,sha256=arE8vLstjBg-Ar1krXF-bBmT1qBtnL7Fpk-NVh38a0U,8944 +pip/_internal/commands/debug.py,sha256=krET-y45CnQzXwKR1qA3M_tJE4LE2vnQtm3yfGyDSnE,6629 +pip/_internal/commands/download.py,sha256=p4lmYDgawRrwDFUpde_-1Gld45FnsMNHUFtOWFUCcSE,4904 +pip/_internal/commands/freeze.py,sha256=gCjoD6foBZPBAAYx5t8zZLkJhsF_ZRtnb3dPuD7beO8,2951 +pip/_internal/commands/hash.py,sha256=EVVOuvGtoPEdFi8SNnmdqlCQrhCxV-kJsdwtdcCnXGQ,1703 +pip/_internal/commands/help.py,sha256=gcc6QDkcgHMOuAn5UxaZwAStsRBrnGSn_yxjS57JIoM,1132 +pip/_internal/commands/index.py,sha256=1VVXXj5MsI2qH-N7uniQQyVkg-KCn_RdjiyiUmkUS5U,4762 
+pip/_internal/commands/install.py,sha256=HTWdTb72Bcrm2tA_d55_hX6yQbchnr_XRdA2Xs8uApU,27851 +pip/_internal/commands/list.py,sha256=SnCh19e5zQKonNP7j25c_xru0Wm7wWWF8j49f-Dy9Bw,12203 +pip/_internal/commands/search.py,sha256=sbBZiARRc050QquOKcCvOr2K3XLsoYebLKZGRi__iUI,5697 +pip/_internal/commands/show.py,sha256=OREbPHF6UzvQiGLC1UIjG52Kc_jYDgcXZMYzgKXMbBI,8064 +pip/_internal/commands/uninstall.py,sha256=DNTYAGJNljMO_YYBxrpcwj0FEl7lo_P55_98O6g2TNk,3526 +pip/_internal/commands/wheel.py,sha256=xGSwLPYUM7jP_McD-wnM4D3zsP0n-NSkHFp4d0mAWIg,6168 +pip/_internal/configuration.py,sha256=dKHBEl8aXnqVuRB0NW7Nz7lyYMwr7XCfkMZvUORaSRo,13153 +pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858 +pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/distributions/__pycache__/base.cpython-39.pyc,, +pip/_internal/distributions/__pycache__/installed.cpython-39.pyc,, +pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc,, +pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc,, +pip/_internal/distributions/base.py,sha256=3FUYD8Gb4YuSu3pggC_FRctZBDbpm5ZK89tPksIUjoE,1172 +pip/_internal/distributions/installed.py,sha256=QObf6KALGtwGx-Ap3Ua5FfcfaRMXWOk_wcrm7n5gYII,767 +pip/_internal/distributions/sdist.py,sha256=3fsErGhAWdGzuO7Wea0F_8b9fKyUL1PoYet273OoAoM,5598 +pip/_internal/distributions/wheel.py,sha256=-NgzdIs-w_hcer_U81yzgpVTljJRg5m79xufqvbjv0s,1115 +pip/_internal/exceptions.py,sha256=XyfiRZn2X8WR61X-JF50BU72TdmVkneWPy9cnuKv2Rg,12762 +pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30 +pip/_internal/index/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/index/__pycache__/collector.cpython-39.pyc,, +pip/_internal/index/__pycache__/package_finder.cpython-39.pyc,, +pip/_internal/index/__pycache__/sources.cpython-39.pyc,, +pip/_internal/index/collector.py,sha256=7rhUeH0IU_dUMk13-lBAN9czRuJ6dbG76Un7xuQ36Ck,17534 +pip/_internal/index/package_finder.py,sha256=_N9LIcwAXbGDN3BUDlikSB93WI9PHv3MvkJ4YapfrPY,36344 +pip/_internal/index/sources.py,sha256=SVyPitv08-Qalh2_Bk5diAJ9GAA_d-a93koouQodAG0,6557 +pip/_internal/locations/__init__.py,sha256=CpH6Cz9HSZ0csN_KPtOcvS9TGYLb7ZNGtCAAmVtjXW0,14444 +pip/_internal/locations/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc,, +pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc,, +pip/_internal/locations/__pycache__/base.cpython-39.pyc,, +pip/_internal/locations/_distutils.py,sha256=Sk7tw8ZP1DWMYJ8MibABsa8IME2Ejv1PKeGlYQCBTZc,5871 +pip/_internal/locations/_sysconfig.py,sha256=LQNKTJKyjVqxXaPntlBwdUqTG1xwYf6GVCKMbyRJx5M,7918 +pip/_internal/locations/base.py,sha256=x5D1ONktmPJd8nnUTh-ELsAJ7fiXA-k-0a_vhfi2_Us,1579 +pip/_internal/main.py,sha256=r-UnUe8HLo5XFJz8inTcOOTiu_sxNhgHb6VwlGUllOI,340 +pip/_internal/metadata/__init__.py,sha256=HzTS3lRukzn-MJaEZkUQhAFe6ulxvNe7nNoBvUzy-DU,1660 +pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/metadata/__pycache__/base.cpython-39.pyc,, +pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc,, +pip/_internal/metadata/base.py,sha256=gbNbb9blWO5hejmror-2n4_wLuYVrTyqwUluY9OmnMg,11103 +pip/_internal/metadata/pkg_resources.py,sha256=-LiuojtAfl3yhNx8rnUKYN3ECBVCVcDWszCupithXAw,5089 +pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 +pip/_internal/models/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/models/__pycache__/candidate.cpython-39.pyc,, 
+pip/_internal/models/__pycache__/direct_url.cpython-39.pyc,, +pip/_internal/models/__pycache__/format_control.cpython-39.pyc,, +pip/_internal/models/__pycache__/index.cpython-39.pyc,, +pip/_internal/models/__pycache__/link.cpython-39.pyc,, +pip/_internal/models/__pycache__/scheme.cpython-39.pyc,, +pip/_internal/models/__pycache__/search_scope.cpython-39.pyc,, +pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc,, +pip/_internal/models/__pycache__/target_python.cpython-39.pyc,, +pip/_internal/models/__pycache__/wheel.cpython-39.pyc,, +pip/_internal/models/candidate.py,sha256=6pcABsaR7CfIHlbJbr2_kMkVJFL_yrYjTx6SVWUnCPQ,990 +pip/_internal/models/direct_url.py,sha256=7XtGQSLLDQb5ZywI2EMnnLcddtf5CJLx44lMtTHPxFw,6350 +pip/_internal/models/format_control.py,sha256=DJpMYjxeYKKQdwNcML2_F0vtAh-qnKTYe-CpTxQe-4g,2520 +pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030 +pip/_internal/models/link.py,sha256=hoT_qsOBAgLBm9GKqpBrNF_mrEXeGXQE-aH_RX2cGgg,9817 +pip/_internal/models/scheme.py,sha256=3EFQp_ICu_shH1-TBqhl0QAusKCPDFOlgHFeN4XowWs,738 +pip/_internal/models/search_scope.py,sha256=LwloG0PJAmtI1hFXIypsD95kWE9xfR5hf_a2v1Vw7sk,4520 +pip/_internal/models/selection_prefs.py,sha256=KZdi66gsR-_RUXUr9uejssk3rmTHrQVJWeNA2sV-VSY,1907 +pip/_internal/models/target_python.py,sha256=qKpZox7J8NAaPmDs5C_aniwfPDxzvpkrCKqfwndG87k,3858 +pip/_internal/models/wheel.py,sha256=hN9Ub-m-cAJCajCcQHyQNsqpcDCbPPDlEzBDwaBMc14,3500 +pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50 +pip/_internal/network/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/network/__pycache__/auth.cpython-39.pyc,, +pip/_internal/network/__pycache__/cache.cpython-39.pyc,, +pip/_internal/network/__pycache__/download.cpython-39.pyc,, +pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc,, +pip/_internal/network/__pycache__/session.cpython-39.pyc,, +pip/_internal/network/__pycache__/utils.cpython-39.pyc,, +pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc,, +pip/_internal/network/auth.py,sha256=a3C7Xaa8kTJjXkdi_wrUjqaySc8Z9Yz7U6QIbXfzMyc,12190 +pip/_internal/network/cache.py,sha256=HoprMCecwd4IS2wDZowc9B_OpaBlFjJYJl4xOxvtuwU,2100 +pip/_internal/network/download.py,sha256=VmiR-KKIBugShZS4JlD7N8mq3hErx-0fK-D8aTYU3Og,6016 +pip/_internal/network/lazy_wheel.py,sha256=1b8ZJ1w4bSBzpGzGwJR_CL2yQ6AFIwWQkS1vbPPw2XU,7627 +pip/_internal/network/session.py,sha256=38IKGKC64MTVUIH5XOR1hr2pOCzp39RccykdmGAvqRU,16729 +pip/_internal/network/utils.py,sha256=igLlTu_-q0LmL8FdJKq-Uj7AT_owrQ-T9FfyarkhK5U,4059 +pip/_internal/network/xmlrpc.py,sha256=AzQgG4GgS152_cqmGr_Oz2MIXsCal-xfsis7fA7nmU0,1791 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/operations/__pycache__/check.cpython-39.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-39.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-39.pyc,, +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc,, +pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc,, +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc,, +pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc,, +pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc,, 
+pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc,, +pip/_internal/operations/build/metadata.py,sha256=KEsyrRFOBs2jhR-AcjyJyeV5GlsK1ubQqAB1j-b0Zu4,1119 +pip/_internal/operations/build/metadata_editable.py,sha256=RnA8UgQqZwtBjBdqi1DW1gI3xaZ7qhKp1Xd-0YTktSk,1177 +pip/_internal/operations/build/metadata_legacy.py,sha256=hjAJ75iKuJfKQYALZD0U6wJ7ElJ_BAEvjDxF8b9_l5k,1945 +pip/_internal/operations/build/wheel.py,sha256=AO9XnTGhTgHtZmU8Dkbfo1OGr41rBuSDjIgAa4zUKgE,1063 +pip/_internal/operations/build/wheel_editable.py,sha256=TVETY-L_M_dSEKBhTIcQOP75zKVXw8tuq1U354Mm30A,1405 +pip/_internal/operations/build/wheel_legacy.py,sha256=aFMVOvyG-_CAIuXEVxuPJkz5UfCppSeu9FBPzn2tWvI,3047 +pip/_internal/operations/check.py,sha256=ca4O9CkPt9Em9sLCf3H0iVt1GIcW7M8C0U5XooaBuT4,5109 +pip/_internal/operations/freeze.py,sha256=ZiYw5GlUpLVx4VJHz4S1AP2JFNyvH0iq5kpcYj2ovyw,9770 +pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51 +pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc,, +pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc,, +pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc,, +pip/_internal/operations/install/editable_legacy.py,sha256=J4VCOHvk_BgA_wG02WmlDtSWLwZJ5S_g9SXBkjYojaw,1298 +pip/_internal/operations/install/legacy.py,sha256=YKrZvH894Iqf2oEkYqF9O7CK1DjTgfZCP3R9Azpjeqo,4158 +pip/_internal/operations/install/wheel.py,sha256=QuQyCZE-XjuJjDYRixo40oUt2ucFhNmSrCbcXY7A9aE,27412 +pip/_internal/operations/prepare.py,sha256=Dg-lFYsFhYeib8NuQvGOxd0wxcmTqXfe_c5zYb3ep64,23838 +pip/_internal/pyproject.py,sha256=YgcyleTgyuh7NwGH9j8_21htqnF_VxgKiPc4ecLBWKk,7215 +pip/_internal/req/__init__.py,sha256=A7mUvT1KAcCYP3H7gUOTx2GRMlgoDur3H68Q0OJqM5A,2793 +pip/_internal/req/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/req/__pycache__/constructors.cpython-39.pyc,, +pip/_internal/req/__pycache__/req_file.cpython-39.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-39.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-39.pyc,, +pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc,, +pip/_internal/req/constructors.py,sha256=FVWkWeGt3fK0DTC3Gurd2jglp_Z10CK-abd6yM3HD-A,15285 +pip/_internal/req/req_file.py,sha256=5N8OTouPCof-305StC2YK9HBxQMw-xO46skRoBPbkZo,17421 +pip/_internal/req/req_install.py,sha256=N8xohvY6CIaVt6D1sU9VWv2muO9oPjixIDisqBXUr0E,33804 +pip/_internal/req/req_set.py,sha256=kHYiLvkKRx21WaLTwOI-54Ng0SSzZZ9SE7FD0PsfvYA,7584 +pip/_internal/req/req_tracker.py,sha256=jK7JDu-Wt73X-gqozrFtgJVlUlnQo0P4IQ4x4_gPlfM,4117 +pip/_internal/req/req_uninstall.py,sha256=Uf8Kx-PgoQIudFq9Y7sFP-uz_I6x1gEfPpJJxujOf14,23748 +pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/resolution/__pycache__/base.cpython-39.pyc,, +pip/_internal/resolution/base.py,sha256=qlmh325SBVfvG6Me9gc5Nsh5sdwHBwzHBq6aEXtKsLA,583 +pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc,, +pip/_internal/resolution/legacy/resolver.py,sha256=Fr7bfTaKqXoaIfSte7mvFRLMb8pAaiozgydoHeIyiHI,18312 +pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc,, +pip/_internal/resolution/resolvelib/base.py,sha256=u1O4fkvCO4mhmu5i32xrDv9AX5NgUci_eYVyBDQhTIM,5220 +pip/_internal/resolution/resolvelib/candidates.py,sha256=5q66J90AoMKKwy1HsdXvEeleOJG8QkAbo8OidFekee0,18210 +pip/_internal/resolution/resolvelib/factory.py,sha256=GnjXkaWRbfjdtQJcjcmkXUyPIgjckCHTu6wkneDMck8,26806 +pip/_internal/resolution/resolvelib/found_candidates.py,sha256=hvL3Hoa9VaYo-qEOZkBi2Iqw251UDxPz-uMHVaWmLpE,5705 +pip/_internal/resolution/resolvelib/provider.py,sha256=HUMHvkU001rtlqvs11NPmMtlyMMLlVQfAl6qXdsLxZQ,9205 +pip/_internal/resolution/resolvelib/reporter.py,sha256=3ZVVYrs5PqvLFJkGLcuXoMK5mTInFzl31xjUpDBpZZk,2526 +pip/_internal/resolution/resolvelib/requirements.py,sha256=pcsnwz7txyDNZUEOWJOZEfivy3COWHPf_DIU7fwZ-Kk,5455 +pip/_internal/resolution/resolvelib/resolver.py,sha256=bkrMZs_jJHP_KFAbg36-lcN4Ums7ESgllup8piHXOz0,9580 +pip/_internal/self_outdated_check.py,sha256=nVLSc0nl4JZ9VI7GsZvblE-zzT-T5ofmMgplned8s_s,6393 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/utils/__pycache__/_log.cpython-39.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc,, +pip/_internal/utils/__pycache__/compat.cpython-39.pyc,, +pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc,, +pip/_internal/utils/__pycache__/datetime.cpython-39.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc,, +pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc,, +pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc,, +pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc,, +pip/_internal/utils/__pycache__/encoding.cpython-39.pyc,, +pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc,, +pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-39.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-39.pyc,, +pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc,, +pip/_internal/utils/__pycache__/logging.cpython-39.pyc,, +pip/_internal/utils/__pycache__/misc.cpython-39.pyc,, +pip/_internal/utils/__pycache__/models.cpython-39.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-39.pyc,, +pip/_internal/utils/__pycache__/parallel.cpython-39.pyc,, +pip/_internal/utils/__pycache__/pkg_resources.cpython-39.pyc,, +pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc,, +pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc,, +pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc,, +pip/_internal/utils/__pycache__/urls.cpython-39.pyc,, +pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc,, +pip/_internal/utils/__pycache__/wheel.cpython-39.pyc,, 
+pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015 +pip/_internal/utils/appdirs.py,sha256=swgcTKOm3daLeXTW6v5BUS2Ti2RvEnGRQYH_yDXklAo,1665 +pip/_internal/utils/compat.py,sha256=ACyBfLgj3_XG-iA5omEDrXqDM0cQKzi8h8HRBInzG6Q,1884 +pip/_internal/utils/compatibility_tags.py,sha256=ydin8QG8BHqYRsPY4OL6cmb44CbqXl1T0xxS97VhHkk,5377 +pip/_internal/utils/datetime.py,sha256=m21Y3wAtQc-ji6Veb6k_M5g6A0ZyFI4egchTdnwh-pQ,242 +pip/_internal/utils/deprecation.py,sha256=NKo8VqLioJ4nnXXGmW4KdasxF90EFHkZaHeX1fT08C8,3627 +pip/_internal/utils/direct_url_helpers.py,sha256=6F1tc2rcKaCZmgfVwsE6ObIe_Pux23mUVYA-2D9wCFc,3206 +pip/_internal/utils/distutils_args.py,sha256=mcAscyp80vTt3xAGTipnpgc83V-_wCvydNELVXLq7JI,1249 +pip/_internal/utils/egg_link.py,sha256=5MVlpz5LirT4iLQq86OYzjXaYF0D4Qk1dprEI7ThST4,2203 +pip/_internal/utils/encoding.py,sha256=bdZ3YgUpaOEBI5MP4-DEXiQarCW3V0rxw1kRz-TaU1Q,1169 +pip/_internal/utils/entrypoints.py,sha256=g4QKi37JkZ2r6A9WbYLMwVzOiE3dn_lsqVhaFy0Erq0,1130 +pip/_internal/utils/filesystem.py,sha256=rrl-rY1w8TYyKYndUyZlE9ffkQyA4-jI9x_59zXkn5s,5893 +pip/_internal/utils/filetypes.py,sha256=i8XAQ0eFCog26Fw9yV0Yb1ygAqKYB1w9Cz9n0fj8gZU,716 +pip/_internal/utils/glibc.py,sha256=tDfwVYnJCOC0BNVpItpy8CGLP9BjkxFHdl0mTS0J7fc,3110 +pip/_internal/utils/hashes.py,sha256=anpZfFGIT6HcIj2td9NHtE8AWg6GeAIhwpP8GPvZE0E,4811 +pip/_internal/utils/inject_securetransport.py,sha256=o-QRVMGiENrTJxw3fAhA7uxpdEdw6M41TjHYtSVRrcg,795 +pip/_internal/utils/logging.py,sha256=oEkBvjj2A6NtVo75_Q-sL7qqH0bMFuY0pK4d8t40SKg,11532 +pip/_internal/utils/misc.py,sha256=HfMsfc9LQbjNlf_EdYm79Ggxb63Nd9WOfoZSW3H4wmo,20432 +pip/_internal/utils/models.py,sha256=5GoYU586SrxURMvDn_jBMJInitviJg4O5-iOU-6I0WY,1193 +pip/_internal/utils/packaging.py,sha256=wA29RPW_KkorI2PIfkm9cWCytpcVbk-wubwUE8YTmbQ,2952 +pip/_internal/utils/parallel.py,sha256=Z-vNgYsyiAx8JfZYbD6ZSzkkPfpk0ANQI_YpCBE0Pxo,3196 +pip/_internal/utils/pkg_resources.py,sha256=A7HUm5lSk7n1_7qypyI4QkXErXgb5iXDlKPXo8r_1Hk,987 +pip/_internal/utils/setuptools_build.py,sha256=yDrfmxUgd0A9SDKV-7UuSTA3YLmVav5J86G9Fym-2FE,4697 +pip/_internal/utils/subprocess.py,sha256=cy2c6XRuYkX3XJF_lIjY5nQL2XygBHLJr6WXwTsjfnc,10058 +pip/_internal/utils/temp_dir.py,sha256=zob3PYMVevONkheOMUp_4jDofrEY3HIu5DHK78cSspI,7662 +pip/_internal/utils/unpacking.py,sha256=Lpgq9-na7nS5lEQybICpLisXZVELLP1YyWhb-SLEPSc,12481 +pip/_internal/utils/urls.py,sha256=AhaesUGl-9it6uvG6fsFPOr9ynFpGaTMk4t5XTX7Z_Q,1759 +pip/_internal/utils/virtualenv.py,sha256=4_48qMzCwB_F5jIK5BC_ua7uiAMVifmQWU9NdaGUoVA,3459 +pip/_internal/utils/wheel.py,sha256=YwsLfuDzPJhFLuGotZ69i0bxJVGSweGuIHG2SxZvZtM,6163 +pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596 +pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-39.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc,, +pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc,, +pip/_internal/vcs/bazaar.py,sha256=pNMHrCLx1jSJzu1t1ycDVwhXQ23XI4Q483cvewaTUDs,2857 +pip/_internal/vcs/git.py,sha256=Ph_hThbfTG040GpJRz1z0ByiNkj5eHgF_shCCbNnCw0,17804 +pip/_internal/vcs/mercurial.py,sha256=LgB5xoX8CPtkl5pvvhXw9xQAXA0SF8CSmyVkGHScP78,4956 +pip/_internal/vcs/subversion.py,sha256=h4_nYmYN9kcfeTPp9wjkHhIeTpFZwoCp1UVm4hbBq90,11596 +pip/_internal/vcs/versioncontrol.py,sha256=W1zLW32PeuYiCV1I_dhqlk_n74B_GFTjNC5xdxs-1Ek,22414 
+pip/_internal/wheel_builder.py,sha256=ZakEA7CEJyp70yHoX0QLE8TAwM7vxF9PYPtjBxT3F1I,12247 +pip/_vendor/__init__.py,sha256=xjcBX0EP50pkaMdCssrsBXoZgo2hTtYxlcH1CIyA3T4,4708 +pip/_vendor/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/__pycache__/distro.cpython-39.pyc,, +pip/_vendor/__pycache__/pyparsing.cpython-39.pyc,, +pip/_vendor/__pycache__/six.cpython-39.pyc,, +pip/_vendor/cachecontrol/__init__.py,sha256=pJtAaUxOsMPnytI1A3juAJkXYDr8krdSnsg4Yg3OBEg,302 +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc,, +pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295 +pip/_vendor/cachecontrol/adapter.py,sha256=sSwaSYd93IIfCFU4tOMgSo6b2LCt_gBSaQUj8ktJFOA,4882 +pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86 +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc,, +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856 +pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695 +pip/_vendor/cachecontrol/controller.py,sha256=CWEX3pedIM9s60suf4zZPtm_JvVgnvogMGK_OiBG5F8,14149 +pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533 +pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070 +pip/_vendor/cachecontrol/serialize.py,sha256=vIa4jvq4x_KSOLdEIedoknX2aXYHQujLDFV4-F21Dno,7091 +pip/_vendor/cachecontrol/wrapper.py,sha256=5LX0uJwkNQUtYSEw3aGmGu9WY8wGipd81mJ8lG0d0M4,690 +pip/_vendor/certifi/__init__.py,sha256=-b78tXibbl0qtgCzv9tc9v6ozwcNX915lT9Tf4a9lds,62 +pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255 +pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc,, +pip/_vendor/certifi/__pycache__/core.cpython-39.pyc,, +pip/_vendor/certifi/core.py,sha256=xcXTRTzkcnYHuGFEjzUFUvg9MOnYkD1gjqhsWOUiB0Y,2846 +pip/_vendor/chardet/__init__.py,sha256=mWZaWmvZkhwfBEAT9O1Y6nRTfKzhT7FHhQTTAujbqUA,3271 +pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc,, 
+pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc,, +pip/_vendor/chardet/__pycache__/version.cpython-39.pyc,, +pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +pip/_vendor/chardet/charsetgroupprober.py,sha256=GZLReHP6FRRn43hvSOoGCxYamErKzyp6RgOQxVeC3kg,3839 +pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc,, +pip/_vendor/chardet/cli/chardetect.py,sha256=XK5zqjUG2a4-y6eLHZ8ThYcp6WWUrdlmELxNypcc2SE,2747 +pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +pip/_vendor/chardet/compat.py,sha256=40zr6wICZwknxyuLGGcIOPyve8DTebBCbbvttvnmp5Q,1200 +pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 
+pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +pip/_vendor/chardet/langbulgarianmodel.py,sha256=Ys1irPhpi61ywJpv6a-6JQOLwn7Ed7YqTBpGZ7adhOk,105675 +pip/_vendor/chardet/langgreekmodel.py,sha256=en5Np_mAnOAR1Akoh0562eSe7phGWuq7H2_KoEv4eQU,99549 +pip/_vendor/chardet/langhebrewmodel.py,sha256=NHkt2JO49NbzkgPzgYgrGIKvpO040WOjUMEPRzkZVDo,98754 +pip/_vendor/chardet/langhungarianmodel.py,sha256=IA_31i27hFJwCRhHQ_0CdUIeBr_qndYmj2SJqDdw1n4,102476 +pip/_vendor/chardet/langrussianmodel.py,sha256=QzrKRvQInhNxnMkYTugNqEfvVFZl_WI1bj9rFLdjYgQ,131158 +pip/_vendor/chardet/langthaimodel.py,sha256=Ju2fDTy26CdiZYkioD8I5ZzKEGe8Et-49zon1_OOZI4,103290 +pip/_vendor/chardet/langturkishmodel.py,sha256=ycEZsKGFiLl-FgoZLiUmWeS8F7F2ts3RrNdgRzArDlU,95924 +pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +pip/_vendor/chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc,, +pip/_vendor/chardet/metadata/languages.py,sha256=Bn9KbnR_sctMbPDMw9IwxOyYeOGuE4tcVNjvE2FOEmA,19452 +pip/_vendor/chardet/sbcharsetprober.py,sha256=nmyMyuxzG87DN6K3Rk2MUzJLMLR69MrWpdnHzOwVUwQ,6136 +pip/_vendor/chardet/sbcsgroupprober.py,sha256=hqefQuXmiFyDBArOjujH6hd6WFXlOD1kWCsxDhjx5Vc,4309 +pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +pip/_vendor/chardet/universaldetector.py,sha256=DpZTXCX0nUHXxkQ9sr4GZxGB_hveZ6hWt3uM94cgWKs,12503 +pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +pip/_vendor/chardet/version.py,sha256=A4CILFAd8MRVG1HoXPp45iK9RLlWyV73a1EtwE8Tvn8,242 +pip/_vendor/colorama/__init__.py,sha256=pCdErryzLSzDW5P-rRPBlPLqbBtIRNJB6cMgoeJns5k,239 +pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc,, +pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc,, +pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc,, +pip/_vendor/colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 +pip/_vendor/colorama/ansitowin32.py,sha256=yV7CEmCb19MjnJKODZEEvMH_fnbJhwnpzo4sxZuGXmA,10517 +pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 +pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 +pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 
+pip/_vendor/distlib/__init__.py,sha256=HTGLP7dnTRTQCbEZNGUxBq-0sobr0KQUMn3yd6uEObA,581 +pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-39.pyc,, +pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc,, +pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 +pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc,, +pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc,, +pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc,, +pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc,, +pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 +pip/_vendor/distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707 +pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 +pip/_vendor/distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854 +pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 +pip/_vendor/distlib/compat.py,sha256=fbsxc5PfJ2wBx1K4k6mQ2goAYs-GZW0tcOPIlE_vf0I,41495 +pip/_vendor/distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059 +pip/_vendor/distlib/index.py,sha256=UfcimNW19AB7IKWam4VaJbXuCBvArKfSxhV16EwavzE,20739 +pip/_vendor/distlib/locators.py,sha256=AKlB3oZvfOTg4E0CtfwOzujFL19X5V4XUA4eHdKOu44,51965 +pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 +pip/_vendor/distlib/markers.py,sha256=9c70ISEKwBjmUOHuIdOygVnRVESOKdNYp9a2TVn4qrI,4989 +pip/_vendor/distlib/metadata.py,sha256=vatoxFdmBr6ie-sTVXVNPOPG3uwMDWJTnEECnm7xDCw,39109 +pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820 +pip/_vendor/distlib/scripts.py,sha256=tjSwENINeV91ROZxec5zTSMRg2jEeKc4enyCHDzNvEE,17720 +pip/_vendor/distlib/util.py,sha256=0Uq_qa63FCLtdyNdWvMnmPbiSvVa-ykHM2E8HT7LSIU,67766 +pip/_vendor/distlib/version.py,sha256=WG__LyAa2GwmA6qSoEJtvJE8REA1LZpbSizy8WvhJLk,23513 +pip/_vendor/distlib/wheel.py,sha256=pj5VVCjqZMcHvgizORWwAFPS7hOk61CZ59dxP8laQ4E,42943 +pip/_vendor/distro.py,sha256=O1EeHMq1-xAO373JI2_6pYEtd09yEkxtmrYkdY-9S-w,48414 +pip/_vendor/html5lib/__init__.py,sha256=BYzcKCqeEii52xDrqBFruhnmtmkiuHXFyFh-cglQ8mk,1160 +pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc,, +pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc,, +pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc,, +pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc,, +pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc,, +pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc,, +pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc,, 
+pip/_vendor/html5lib/_ihatexml.py,sha256=ifOwF7pXqmyThIXc3boWc96s4MDezqRrRVp7FwDYUFs,16728 +pip/_vendor/html5lib/_inputstream.py,sha256=jErNASMlkgs7MpOM9Ve_VdLDJyFFweAjLuhVutZz33U,32353 +pip/_vendor/html5lib/_tokenizer.py,sha256=04mgA2sNTniutl2fxFv-ei5bns4iRaPxVXXHh_HrV_4,77040 +pip/_vendor/html5lib/_trie/__init__.py,sha256=nqfgO910329BEVJ5T4psVwQtjd2iJyEXQ2-X8c1YxwU,109 +pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc,, +pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013 +pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 +pip/_vendor/html5lib/_utils.py,sha256=Dx9AKntksRjFT1veBj7I362pf5OgIaT0zglwq43RnfU,4931 +pip/_vendor/html5lib/constants.py,sha256=Ll-yzLU_jcjyAI_h57zkqZ7aQWE5t5xA4y_jQgoUUhw,83464 +pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc,, +pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc,, +pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc,, +pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc,, +pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc,, +pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc,, +pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc,, +pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 +pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 +pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 +pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 +pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 +pip/_vendor/html5lib/filters/sanitizer.py,sha256=m6oGmkBhkGAnn2nV6D4hE78SCZ6WEnK9rKdZB3uXBIc,26897 +pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 +pip/_vendor/html5lib/html5parser.py,sha256=anr-aXre_ImfrkQ35c_rftKXxC80vJCREKe06Tq15HA,117186 +pip/_vendor/html5lib/serializer.py,sha256=_PpvcZF07cwE7xr9uKkZqh5f4UEaI8ltCU2xPJzaTpk,15759 +pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 +pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-39.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-39.pyc,, +pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 +pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 +pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 +pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-39.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-39.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-39.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-39.pyc,, 
+pip/_vendor/html5lib/treebuilders/base.py,sha256=z-o51vt9r_l2IDG5IioTOKGzZne4Fy3_Fc-7ztrOh4I,14565 +pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925 +pip/_vendor/html5lib/treebuilders/etree.py,sha256=w5ZFpKk6bAxnrwD2_BrF5EVC7vzz0L3LMi9Sxrbc_8w,12836 +pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9gqDjs-IxsPhBYa5cpvv2FZ1KZlG83Giusy2lFmvIkE,14766 +pip/_vendor/html5lib/treewalkers/__init__.py,sha256=OBPtc1TU5mGyy18QDMxKEyYEz0wxFUUNj5v0-XgmYhY,5719 +pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-39.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-39.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-39.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-39.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-39.pyc,, +pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 +pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 +pip/_vendor/html5lib/treewalkers/etree.py,sha256=xo1L5m9VtkfpFJK0pFmkLVajhqYYVisVZn3k9kYpPkI,4551 +pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=_b0LAVWLcVu9WaU_-w3D8f0IRSpCbjf667V-3NRdhTw,6357 +pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 +pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +pip/_vendor/idna/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/idna/__pycache__/codec.cpython-39.pyc,, +pip/_vendor/idna/__pycache__/compat.cpython-39.pyc,, +pip/_vendor/idna/__pycache__/core.cpython-39.pyc,, +pip/_vendor/idna/__pycache__/idnadata.cpython-39.pyc,, +pip/_vendor/idna/__pycache__/intranges.cpython-39.pyc,, +pip/_vendor/idna/__pycache__/package_data.cpython-39.pyc,, +pip/_vendor/idna/__pycache__/uts46data.cpython-39.pyc,, +pip/_vendor/idna/codec.py,sha256=QsPFD3Je8gN17rfs14e7zTGRWlnL7bNf2ZqcHTRVYHs,3453 +pip/_vendor/idna/compat.py,sha256=5A9xR04puRHCsyjBNewZlVSiarth7K1bZqyEOeob1fA,360 +pip/_vendor/idna/core.py,sha256=P2QG1RwnhYqke_iRW32NRD5T05fPrEGItqm9TC7Oivk,12689 +pip/_vendor/idna/idnadata.py,sha256=MmZj0WS3ewh_OUIhfg3ZtTrMo_VTm0KEEwp21Mxo_yc,73831 +pip/_vendor/idna/intranges.py,sha256=EqgXwyATAn-CTACInqH9tYsYAitGB2VcQ50RZt_Cpjs,1933 +pip/_vendor/idna/package_data.py,sha256=_028B4fvadRIaXMwMYjhuQPP3AxTIt1IRE7X6RDR4Mk,21 +pip/_vendor/idna/uts46data.py,sha256=DGzwDQv8JijY17I_7ondo3stjFjNnjvVAbA-z0k1XOE,201849 +pip/_vendor/msgpack/__init__.py,sha256=2gJwcsTIaAtCM0GMi2rU-_Y6kILeeQuqRkrQ22jSANc,1118 +pip/_vendor/msgpack/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/msgpack/__pycache__/_version.cpython-39.pyc,, +pip/_vendor/msgpack/__pycache__/exceptions.cpython-39.pyc,, +pip/_vendor/msgpack/__pycache__/ext.cpython-39.pyc,, +pip/_vendor/msgpack/__pycache__/fallback.cpython-39.pyc,, +pip/_vendor/msgpack/_version.py,sha256=dFR03oACnj4lsKd1RnwD7BPMiVI_FMygdOL1TOBEw_U,20 +pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 +pip/_vendor/msgpack/ext.py,sha256=4l356Y4sVEcvCla2dh_cL57vh4GMhZfa3kuWHFHYz6A,6088 +pip/_vendor/msgpack/fallback.py,sha256=Rpv1Ldey8f8ueRnQznD4ARKBn9dxM2PywVNkXI8IEeE,38026 +pip/_vendor/packaging/__about__.py,sha256=p_OQloqH2saadcbUQmWEsWK857dI6_ff5E3aSiCqGFA,661 +pip/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497 +pip/_vendor/packaging/__pycache__/__about__.cpython-39.pyc,, 
+pip/_vendor/packaging/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/_manylinux.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/_musllinux.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/tags.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-39.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-39.pyc,, +pip/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488 +pip/_vendor/packaging/_musllinux.py,sha256=z5yeG1ygOPx4uUyLdqj-p8Dk5UBb5H_b0NIjW9yo8oA,4378 +pip/_vendor/packaging/_structures.py,sha256=TMiAgFbdUOPmIfDIfiHc3KFhSJ8kMjof2QS5I-2NyQ8,1629 +pip/_vendor/packaging/markers.py,sha256=AJBOcY8Oq0kYc570KuuPTkvuqjAlhufaE2c9sCUbm64,8487 +pip/_vendor/packaging/requirements.py,sha256=NtDlPBtojpn1IUC85iMjPNsUmufjpSlwnNA-Xb4m5NA,4676 +pip/_vendor/packaging/specifiers.py,sha256=MZ-fYcNL3u7pNrt-6g2EQO7AbRXkjc-SPEYwXMQbLmc,30964 +pip/_vendor/packaging/tags.py,sha256=akIerYw8W0sz4OW9HHozgawWnbt2GGOPm3sviW0jowY,15714 +pip/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200 +pip/_vendor/packaging/version.py,sha256=AOLsewGcHCskMRdz2ykO3Gv7U7YZwgzE5Pj9ifjd6S8,14479 +pip/_vendor/pep517/__init__.py,sha256=Y1bATL2qbFNN6M_DQa4yyrwqjpIiL-j9T6kBmR0DS14,130 +pip/_vendor/pep517/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/pep517/__pycache__/build.cpython-39.pyc,, +pip/_vendor/pep517/__pycache__/check.cpython-39.pyc,, +pip/_vendor/pep517/__pycache__/colorlog.cpython-39.pyc,, +pip/_vendor/pep517/__pycache__/compat.cpython-39.pyc,, +pip/_vendor/pep517/__pycache__/dirtools.cpython-39.pyc,, +pip/_vendor/pep517/__pycache__/envbuild.cpython-39.pyc,, +pip/_vendor/pep517/__pycache__/meta.cpython-39.pyc,, +pip/_vendor/pep517/__pycache__/wrappers.cpython-39.pyc,, +pip/_vendor/pep517/build.py,sha256=2bar6EdjwIz2Dlfy94qdxn3oA9mVnnny40mfoT5f-qI,3457 +pip/_vendor/pep517/check.py,sha256=bCORq1WrHjhpTONa-zpAqG0EB9rHNuhO1ORu6DsDuL8,6084 +pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 +pip/_vendor/pep517/compat.py,sha256=NmLImE5oiDT3gbEhJ4w7xeoMFcpAPrGu_NltBytSJUY,1253 +pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129 +pip/_vendor/pep517/envbuild.py,sha256=zFde--rmzjXMLXcm7SA_3hDtgk5VCTA8hjpk88RbF6E,6100 +pip/_vendor/pep517/in_process/__init__.py,sha256=MyWoAi8JHdcBv7yXuWpUSVADbx6LSB9rZh7kTIgdA8Y,563 +pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-39.pyc,, +pip/_vendor/pep517/in_process/_in_process.py,sha256=D3waguyNSGcwosociD5USfcycYr2RCzCjYtxX5UHQmQ,11201 +pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463 +pip/_vendor/pep517/wrappers.py,sha256=impq7Cz_LL1iDF1iiOzYWB4MaEu6O6Gps7TJ5qsJz1Q,13429 +pip/_vendor/pkg_resources/__init__.py,sha256=NnpQ3g6BCHzpMgOR_OLBmYtniY4oOzdKpwqghfq_6ug,108287 +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-39.pyc,, +pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 +pip/_vendor/platformdirs/__init__.py,sha256=3iz938Grn-6IRg8gSuMxJtgiBfH0xqRqAlMBo-vPGUw,12859 
+pip/_vendor/platformdirs/__main__.py,sha256=SzGvNkYWuosrWXs2yL2VqcXEh-kivWq3-53-BpTco0o,1140 +pip/_vendor/platformdirs/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/platformdirs/__pycache__/__main__.cpython-39.pyc,, +pip/_vendor/platformdirs/__pycache__/android.cpython-39.pyc,, +pip/_vendor/platformdirs/__pycache__/api.cpython-39.pyc,, +pip/_vendor/platformdirs/__pycache__/macos.cpython-39.pyc,, +pip/_vendor/platformdirs/__pycache__/unix.cpython-39.pyc,, +pip/_vendor/platformdirs/__pycache__/version.cpython-39.pyc,, +pip/_vendor/platformdirs/__pycache__/windows.cpython-39.pyc,, +pip/_vendor/platformdirs/android.py,sha256=dadYfG2oc900YVi5AONQWw2WEvk-kmgkZs5iiNSiWiE,3994 +pip/_vendor/platformdirs/api.py,sha256=yhRR6RkcZzPBfJD4Sn90vCHZbRMQ9nwtnRaa93X1wR8,4922 +pip/_vendor/platformdirs/macos.py,sha256=vIowPYKkHksJcWVjqHQoa-oI1i2D0S7gsSdyFzZDJEA,2619 +pip/_vendor/platformdirs/unix.py,sha256=7JdDnsyTFn2IHC8IFdiNYH7_R8VS-rPx8ivh4_dT1DU,6905 +pip/_vendor/platformdirs/version.py,sha256=uUssQTtUqVP-PxbOSNBzNGRW27X5u1GvOllg--kzyuw,80 +pip/_vendor/platformdirs/windows.py,sha256=91nNccR0CSxX_myMppSvUT1qtQao6kaO96e6ior8-Xw,6416 +pip/_vendor/progress/__init__.py,sha256=1HejNZtv2ouUNQeStUDAtZrtwkz_3FmYKQ476hJ7zOs,5294 +pip/_vendor/progress/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/progress/__pycache__/bar.cpython-39.pyc,, +pip/_vendor/progress/__pycache__/colors.cpython-39.pyc,, +pip/_vendor/progress/__pycache__/counter.cpython-39.pyc,, +pip/_vendor/progress/__pycache__/spinner.cpython-39.pyc,, +pip/_vendor/progress/bar.py,sha256=GbedY0oZ-Q1duXjmvVLO0tSf-uTSH7hJ3zzyI91Esws,2942 +pip/_vendor/progress/colors.py,sha256=cCYXQnYFYVmQKKmYEbQ_lj6SPSFzdw4FN98F2x2kR-U,2655 +pip/_vendor/progress/counter.py,sha256=zYt9DWH0_05s8Q9TrJwHVud-WwsyyaR3PwYtk5hxwwQ,1613 +pip/_vendor/progress/spinner.py,sha256=u5ElzW94XEiLGH-aAlr54VJtKfeK745xr6UfGvvflzU,1461 +pip/_vendor/pyparsing.py,sha256=J1b4z3S_KwyJW7hKGnoN-hXW9pgMIzIP6QThyY5yJq4,273394 +pip/_vendor/requests/__init__.py,sha256=g4Bh1QYh6JKjMS4YLobx0uOLq-41sINaXjvbhX2VI8g,5113 +pip/_vendor/requests/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/__version__.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/_internal_utils.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/help.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/packages.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-39.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-39.pyc,, +pip/_vendor/requests/__version__.py,sha256=PZEyPTSIN_jRIAIB51wV7pw81m3qAw0InSR7OrKZUnE,441 +pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 +pip/_vendor/requests/adapters.py,sha256=ERCyw7DS8_GmsyAHV0jo6amFT6nJGAcNjPE62H5V4mo,23532 +pip/_vendor/requests/api.py,sha256=hjuoP79IAEmX6Dysrw8t032cLfwLHxbI_wM4gC5G9t0,6402 
+pip/_vendor/requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207 +pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 +pip/_vendor/requests/compat.py,sha256=LQWuCR4qXk6w7-qQopXyz0WNHUdAD40k0mKnaAEf1-g,2045 +pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 +pip/_vendor/requests/exceptions.py,sha256=dwIi512RCDqXJ2T81nLC88mqPNhUFnOI_CgKKDXhTO8,3250 +pip/_vendor/requests/help.py,sha256=dyhe3lcmHXnFCzDiZVjcGmVvvO_jtsfAm-AC542ndw8,3972 +pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 +pip/_vendor/requests/models.py,sha256=9_LS_t1t6HbbaWFE3ZkxGmmHN2V8BgxziiOU84rrQ50,34924 +pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 +pip/_vendor/requests/sessions.py,sha256=57O4ud9yRL6eLYh-dtFbqC1kO4d_EwZcCgYXEkujlfs,30168 +pip/_vendor/requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188 +pip/_vendor/requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005 +pip/_vendor/requests/utils.py,sha256=U_-i6WxLw-67KEij43xHbcvL0DdeQ5Jbd4hfifWJzQY,31394 +pip/_vendor/resolvelib/__init__.py,sha256=fzWkeoLV8ol6l2fvBVRZZLylOePc9w9tKRvUb8RJsCY,537 +pip/_vendor/resolvelib/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/resolvelib/__pycache__/providers.cpython-39.pyc,, +pip/_vendor/resolvelib/__pycache__/reporters.cpython-39.pyc,, +pip/_vendor/resolvelib/__pycache__/resolvers.cpython-39.pyc,, +pip/_vendor/resolvelib/__pycache__/structs.cpython-39.pyc,, +pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-39.pyc,, +pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156 +pip/_vendor/resolvelib/providers.py,sha256=roVmFBItQJ0TkhNua65h8LdNny7rmeqVEXZu90QiP4o,5872 +pip/_vendor/resolvelib/reporters.py,sha256=hQvvXuuEBOyEWO8KDfLsWKVjX55UFMAUwO0YZMNpzAw,1364 +pip/_vendor/resolvelib/resolvers.py,sha256=UjFUEVrUa1hCzfEEakmjHEjYAL9J5ACJmwZyHFdmzvE,17540 +pip/_vendor/resolvelib/structs.py,sha256=IVIYof6sA_N4ZEiE1C1UhzTX495brCNnyCdgq6CYq28,4794 +pip/_vendor/six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549 +pip/_vendor/tenacity/__init__.py,sha256=GLLsTFD4Bd5VDgTR6mU_FxyOsrxc48qONorVaRebeD4,18257 +pip/_vendor/tenacity/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/_asyncio.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/_utils.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/after.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/before.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/before_sleep.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/nap.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/retry.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/stop.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-39.pyc,, +pip/_vendor/tenacity/__pycache__/wait.cpython-39.pyc,, +pip/_vendor/tenacity/_asyncio.py,sha256=HEb0BVJEeBJE9P-m9XBxh1KcaF96BwoeqkJCL5sbVcQ,3314 +pip/_vendor/tenacity/_utils.py,sha256=-y68scDcyoqvTJuJJ0GTfjdSCljEYlbCYvgk7nM4NdM,1944 +pip/_vendor/tenacity/after.py,sha256=dlmyxxFy2uqpLXDr838DiEd7jgv2AGthsWHGYcGYsaI,1496 +pip/_vendor/tenacity/before.py,sha256=7XtvRmO0dRWUp8SVn24OvIiGFj8-4OP5muQRUiWgLh0,1376 +pip/_vendor/tenacity/before_sleep.py,sha256=ThyDvqKU5yle_IvYQz_b6Tp6UjUS0PhVp6zgqYl9U6Y,1908 
+pip/_vendor/tenacity/nap.py,sha256=fRWvnz1aIzbIq9Ap3gAkAZgDH6oo5zxMrU6ZOVByq0I,1383 +pip/_vendor/tenacity/retry.py,sha256=62R71W59bQjuNyFKsDM7hE2aEkEPtwNBRA0tnsEvgSk,6645 +pip/_vendor/tenacity/stop.py,sha256=sKHmHaoSaW6sKu3dTxUVKr1-stVkY7lw4Y9yjZU30zQ,2790 +pip/_vendor/tenacity/tornadoweb.py,sha256=E8lWO2nwe6dJgoB-N2HhQprYLDLB_UdSgFnv-EN6wKE,2145 +pip/_vendor/tenacity/wait.py,sha256=e_Saa6I2tsNLpCL1t9897wN2fGb0XQMQlE4bU2t9V2w,6691 +pip/_vendor/tomli/__init__.py,sha256=z1Elt0nLAqU5Y0DOn9p__8QnLWavlEOpRyQikdYgKro,230 +pip/_vendor/tomli/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/tomli/__pycache__/_parser.cpython-39.pyc,, +pip/_vendor/tomli/__pycache__/_re.cpython-39.pyc,, +pip/_vendor/tomli/_parser.py,sha256=50BD4o9YbzFAGAYyZLqZC8F81DQ7iWWyJnrHNwBKa6A,22415 +pip/_vendor/tomli/_re.py,sha256=5GPfgXKteg7wRFCF-DzlkAPI2ilHbkMK2-JC49F-AJQ,2681 +pip/_vendor/urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763 +pip/_vendor/urllib3/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/_collections.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/_version.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/connection.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/connectionpool.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/exceptions.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/fields.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/filepost.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/poolmanager.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/request.cpython-39.pyc,, +pip/_vendor/urllib3/__pycache__/response.cpython-39.pyc,, +pip/_vendor/urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811 +pip/_vendor/urllib3/_version.py,sha256=CA4bKbKLwUBfKitbVR-44Whe53HWyInIVElDQQniAJU,63 +pip/_vendor/urllib3/connection.py,sha256=8TiEbQrJMgySqOllKNeX5tMv8nluKRjNj5j9hyzS6x0,20080 +pip/_vendor/urllib3/connectionpool.py,sha256=FQoodlNAP1KeUi4htGdl5TJEvKL5LWisCbmFNewxRpg,37587 +pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=eRy1Mj-wpg7sR6-OSvnSV4jUbjMT464dLN_CWxbIRVw,17649 +pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922 +pip/_vendor/urllib3/contrib/appengine.py,sha256=lfzpHFmJiO82shClLEm3QB62SYgHWnjpZOH_2JhU5Tc,11034 +pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=ej9gGvfAb2Gt00lafFp45SIoRz-QwrQ4WChm6gQmAlM,4538 +pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=DD4pInv_3OEEGffEFynBoirc8ldR789sLmGSKukzA0E,16900 
+pip/_vendor/urllib3/contrib/securetransport.py,sha256=4qUKo7PUV-vVIqXmr2BD-sH7qplB918jiD5eNsRI9vU,34449 +pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097 +pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 +pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 +pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 +pip/_vendor/urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108 +pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/urllib3/packages/__pycache__/six.cpython-39.pyc,, +pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc,, +pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 +pip/_vendor/urllib3/packages/six.py,sha256=1LVW7ljqRirFlfExjwl-v1B7vSAUNTmzGMs-qays2zg,34666 +pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=ZVMwCkHx-py8ERsxxM3Il-MiREZktV-8iLBmCfRRHI4,927 +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-39.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679 +pip/_vendor/urllib3/poolmanager.py,sha256=zcldh-BB6YUuyML51JdMhusEeaSJoXFlquGRZ1haa7Q,20716 +pip/_vendor/urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985 +pip/_vendor/urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203 +pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 +pip/_vendor/urllib3/util/__pycache__/__init__.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/connection.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/proxy.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/queue.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/request.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/response.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/retry.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/timeout.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/url.cpython-39.pyc,, +pip/_vendor/urllib3/util/__pycache__/wait.cpython-39.pyc,, +pip/_vendor/urllib3/util/connection.py,sha256=KykjNIXzUZEzeKEOpl5xvKs6IsESXP9o9eTrjE0W_Ys,4920 +pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605 +pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 +pip/_vendor/urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123 +pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 +pip/_vendor/urllib3/util/retry.py,sha256=vhwG-XxNEJQ9Bc-JQjE5qV4OkKG0ojgqXzRv2y-hFC0,21438 +pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177 +pip/_vendor/urllib3/util/ssltransport.py,sha256=F_UncOXGcc-MgeWFTA1H4QCt_RRNQXRbF6onje3SyHY,6931 +pip/_vendor/urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003 
+pip/_vendor/urllib3/util/url.py,sha256=QVEzcbHipbXyCWwH6R4K4TR-N8T4LM55WEMwNUTBmLE,14047
+pip/_vendor/urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
+pip/_vendor/vendor.txt,sha256=vux9Tgc3pSRZZnXz9TNDdn514NdkDdnb-QPC0LCHkK4,432
+pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579
+pip/_vendor/webencodings/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/webencodings/__pycache__/labels.cpython-39.pyc,,
+pip/_vendor/webencodings/__pycache__/mklabels.cpython-39.pyc,,
+pip/_vendor/webencodings/__pycache__/tests.cpython-39.pyc,,
+pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-39.pyc,,
+pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
+pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
+pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
+pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
+pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286
diff --git a/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/REQUESTED b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/WHEEL b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/WHEEL
new file mode 100644
index 0000000..becc9a6
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/entry_points.txt b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/entry_points.txt
new file mode 100644
index 0000000..9609f72
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/entry_points.txt
@@ -0,0 +1,5 @@
+[console_scripts]
+pip = pip._internal.cli.main:main
+pip3 = pip._internal.cli.main:main
+pip3.9 = pip._internal.cli.main:main
+
diff --git a/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/top_level.txt b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/top_level.txt
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/pip-21.3.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.9/site-packages/pip/__init__.py b/.venv/lib/python3.9/site-packages/pip/__init__.py
new file mode 100644
index 0000000..acead99
--- /dev/null
+++ b/.venv/lib/python3.9/site-packages/pip/__init__.py
@@ -0,0 +1,13 @@
+from typing import List, Optional
+
+__version__ = "21.3.1"
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    """This is an internal API only meant for use by pip's own console scripts.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/.venv/lib/python3.9/site-packages/pip/__main__.py b/.venv/lib/python3.9/site-packages/pip/__main__.py new file mode 100644 index 0000000..fe34a7b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/__main__.py @@ -0,0 +1,31 @@ +import os +import sys +import warnings + +# Remove '' and current working directory from the first entry +# of sys.path, if present to avoid using current directory +# in pip commands check, freeze, install, list and show, +# when invoked as python -m pip +if sys.path[0] in ("", os.getcwd()): + sys.path.pop(0) + +# If we are running from a wheel, add the wheel to sys.path +# This allows the usage python pip-*.whl/pip install pip-*.whl +if __package__ == "": + # __file__ is pip-*.whl/pip/__main__.py + # first dirname call strips of '/__main__.py', second strips off '/pip' + # Resulting path is the name of the wheel itself + # Add that to sys.path so we can import pip + path = os.path.dirname(os.path.dirname(__file__)) + sys.path.insert(0, path) + +if __name__ == "__main__": + # Work around the error reported in #9540, pending a proper fix. + # Note: It is essential the warning filter is set *before* importing + # pip, as the deprecation happens at import time, not runtime. + warnings.filterwarnings( + "ignore", category=DeprecationWarning, module=".*packaging\\.version" + ) + from pip._internal.cli.main import main as _main + + sys.exit(_main()) diff --git a/.venv/lib/python3.9/site-packages/pip/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..f23dd40 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/__pycache__/__main__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/__pycache__/__main__.cpython-39.pyc new file mode 100644 index 0000000..b408b96 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/__pycache__/__main__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/__init__.py new file mode 100644 index 0000000..faf25af --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/__init__.py @@ -0,0 +1,19 @@ +from typing import List, Optional + +import pip._internal.utils.inject_securetransport # noqa +from pip._internal.utils import _log + +# init_logging() must be called before any call to logging.getLogger() +# which happens at import of most modules. +_log.init_logging() + + +def main(args: (Optional[List[str]]) = None) -> int: + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args, _nowarn=True) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..d4b999f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc new file mode 100644 index 0000000..8056533 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc new file mode 100644 index 0000000..a6bf439 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc new file mode 100644 index 0000000..49d2847 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc new file mode 100644 index 0000000..8c5cee3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc new file mode 100644 index 0000000..eb3880a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc new file mode 100644 index 0000000..04e9ef0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc new file mode 100644 index 0000000..332053c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc new file mode 100644 index 0000000..28914da Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/build_env.py b/.venv/lib/python3.9/site-packages/pip/_internal/build_env.py new file mode 100644 index 0000000..8faf1cf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/build_env.py @@ -0,0 +1,293 
@@ +"""Build Environment used for isolation during sdist building +""" + +import contextlib +import logging +import os +import pathlib +import sys +import textwrap +import zipfile +from collections import OrderedDict +from sysconfig import get_paths +from types import TracebackType +from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type + +from pip._vendor.certifi import where +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.version import Version + +from pip import __file__ as pip_location +from pip._internal.cli.spinners import open_spinner +from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib +from pip._internal.metadata import get_environment +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds + +if TYPE_CHECKING: + from pip._internal.index.package_finder import PackageFinder + +logger = logging.getLogger(__name__) + + +class _Prefix: + def __init__(self, path: str) -> None: + self.path = path + self.setup = False + self.bin_dir = get_paths( + "nt" if os.name == "nt" else "posix_prefix", + vars={"base": path, "platbase": path}, + )["scripts"] + self.lib_dirs = get_prefixed_libs(path) + + +@contextlib.contextmanager +def _create_standalone_pip() -> Iterator[str]: + """Create a "standalone pip" zip file. + + The zip file's content is identical to the currently-running pip. + It will be used to install requirements into the build environment. + """ + source = pathlib.Path(pip_location).resolve().parent + + # Return the current instance if `source` is not a directory. We can't build + # a zip from this, and it likely means the instance is already standalone. + if not source.is_dir(): + yield str(source) + return + + with TempDirectory(kind="standalone-pip") as tmp_dir: + pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip") + kwargs = {} + if sys.version_info >= (3, 8): + kwargs["strict_timestamps"] = False + with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf: + for child in source.rglob("*"): + zf.write(child, child.relative_to(source.parent).as_posix()) + yield os.path.join(pip_zip, "pip") + + +class BuildEnvironment: + """Creates and manages an isolated environment to install build deps""" + + def __init__(self) -> None: + temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True) + + self._prefixes = OrderedDict( + (name, _Prefix(os.path.join(temp_dir.path, name))) + for name in ("normal", "overlay") + ) + + self._bin_dirs: List[str] = [] + self._lib_dirs: List[str] = [] + for prefix in reversed(list(self._prefixes.values())): + self._bin_dirs.append(prefix.bin_dir) + self._lib_dirs.extend(prefix.lib_dirs) + + # Customize site to: + # - ensure .pth files are honored + # - prevent access to system site packages + system_sites = { + os.path.normcase(site) for site in (get_purelib(), get_platlib()) + } + self._site_dir = os.path.join(temp_dir.path, "site") + if not os.path.exists(self._site_dir): + os.mkdir(self._site_dir) + with open(os.path.join(self._site_dir, "sitecustomize.py"), "w") as fp: + fp.write( + textwrap.dedent( + """ + import os, site, sys + + # First, drop system-sites related paths. 
+ original_sys_path = sys.path[:] + known_paths = set() + for path in {system_sites!r}: + site.addsitedir(path, known_paths=known_paths) + system_paths = set( + os.path.normcase(path) + for path in sys.path[len(original_sys_path):] + ) + original_sys_path = [ + path for path in original_sys_path + if os.path.normcase(path) not in system_paths + ] + sys.path = original_sys_path + + # Second, add lib directories. + # ensuring .pth file are processed. + for path in {lib_dirs!r}: + assert not path in sys.path + site.addsitedir(path) + """ + ).format(system_sites=system_sites, lib_dirs=self._lib_dirs) + ) + + def __enter__(self) -> None: + self._save_env = { + name: os.environ.get(name, None) + for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH") + } + + path = self._bin_dirs[:] + old_path = self._save_env["PATH"] + if old_path: + path.extend(old_path.split(os.pathsep)) + + pythonpath = [self._site_dir] + + os.environ.update( + { + "PATH": os.pathsep.join(path), + "PYTHONNOUSERSITE": "1", + "PYTHONPATH": os.pathsep.join(pythonpath), + } + ) + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + for varname, old_value in self._save_env.items(): + if old_value is None: + os.environ.pop(varname, None) + else: + os.environ[varname] = old_value + + def check_requirements( + self, reqs: Iterable[str] + ) -> Tuple[Set[Tuple[str, str]], Set[str]]: + """Return 2 sets: + - conflicting requirements: set of (installed, wanted) reqs tuples + - missing requirements: set of reqs + """ + missing = set() + conflicting = set() + if reqs: + env = get_environment(self._lib_dirs) + for req_str in reqs: + req = Requirement(req_str) + dist = env.get_distribution(req.name) + if not dist: + missing.add(req_str) + continue + if isinstance(dist.version, Version): + installed_req_str = f"{req.name}=={dist.version}" + else: + installed_req_str = f"{req.name}==={dist.version}" + if dist.version not in req.specifier: + conflicting.add((installed_req_str, req_str)) + # FIXME: Consider direct URL? + return conflicting, missing + + def install_requirements( + self, + finder: "PackageFinder", + requirements: Iterable[str], + prefix_as_string: str, + message: str, + ) -> None: + prefix = self._prefixes[prefix_as_string] + assert not prefix.setup + prefix.setup = True + if not requirements: + return + with contextlib.ExitStack() as ctx: + # TODO: Remove this block when dropping 3.6 support. Python 3.6 + # lacks importlib.resources and pep517 has issues loading files in + # a zip, so we fallback to the "old" method by adding the current + # pip directory to the child process's sys.path. 
+ if sys.version_info < (3, 7): + pip_runnable = os.path.dirname(pip_location) + else: + pip_runnable = ctx.enter_context(_create_standalone_pip()) + self._install_requirements( + pip_runnable, + finder, + requirements, + prefix, + message, + ) + + @staticmethod + def _install_requirements( + pip_runnable: str, + finder: "PackageFinder", + requirements: Iterable[str], + prefix: _Prefix, + message: str, + ) -> None: + args: List[str] = [ + sys.executable, + pip_runnable, + "install", + "--ignore-installed", + "--no-user", + "--prefix", + prefix.path, + "--no-warn-script-location", + ] + if logger.getEffectiveLevel() <= logging.DEBUG: + args.append("-v") + for format_control in ("no_binary", "only_binary"): + formats = getattr(finder.format_control, format_control) + args.extend( + ( + "--" + format_control.replace("_", "-"), + ",".join(sorted(formats or {":none:"})), + ) + ) + + index_urls = finder.index_urls + if index_urls: + args.extend(["-i", index_urls[0]]) + for extra_index in index_urls[1:]: + args.extend(["--extra-index-url", extra_index]) + else: + args.append("--no-index") + for link in finder.find_links: + args.extend(["--find-links", link]) + + for host in finder.trusted_hosts: + args.extend(["--trusted-host", host]) + if finder.allow_all_prereleases: + args.append("--pre") + if finder.prefer_binary: + args.append("--prefer-binary") + args.append("--") + args.extend(requirements) + extra_environ = {"_PIP_STANDALONE_CERT": where()} + with open_spinner(message) as spinner: + call_subprocess(args, spinner=spinner, extra_environ=extra_environ) + + +class NoOpBuildEnvironment(BuildEnvironment): + """A no-op drop-in replacement for BuildEnvironment""" + + def __init__(self) -> None: + pass + + def __enter__(self) -> None: + pass + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + pass + + def cleanup(self) -> None: + pass + + def install_requirements( + self, + finder: "PackageFinder", + requirements: Iterable[str], + prefix_as_string: str, + message: str, + ) -> None: + raise NotImplementedError() diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cache.py b/.venv/lib/python3.9/site-packages/pip/_internal/cache.py new file mode 100644 index 0000000..1d6df22 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cache.py @@ -0,0 +1,264 @@ +"""Cache Management +""" + +import hashlib +import json +import logging +import os +from typing import Any, Dict, List, Optional, Set + +from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InvalidWheelFilename +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.urls import path_to_url + +logger = logging.getLogger(__name__) + + +def _hash_dict(d: Dict[str, str]) -> str: + """Return a stable sha224 of a dictionary.""" + s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True) + return hashlib.sha224(s.encode("ascii")).hexdigest() + + +class Cache: + """An abstract class - provides cache directories for data from links + + + :param cache_dir: The root of the cache. + :param format_control: An object of FormatControl class to limit + binaries being read from the cache. 
+ :param allowed_formats: which formats of files the cache should store. + ('binary' and 'source' are the only allowed values) + """ + + def __init__( + self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str] + ) -> None: + super().__init__() + assert not cache_dir or os.path.isabs(cache_dir) + self.cache_dir = cache_dir or None + self.format_control = format_control + self.allowed_formats = allowed_formats + + _valid_formats = {"source", "binary"} + assert self.allowed_formats.union(_valid_formats) == _valid_formats + + def _get_cache_path_parts(self, link: Link) -> List[str]: + """Get parts of part that must be os.path.joined with cache_dir""" + + # We want to generate an url to use as our cache key, we don't want to + # just re-use the URL because it might have other items in the fragment + # and we don't care about those. + key_parts = {"url": link.url_without_fragment} + if link.hash_name is not None and link.hash is not None: + key_parts[link.hash_name] = link.hash + if link.subdirectory_fragment: + key_parts["subdirectory"] = link.subdirectory_fragment + + # Include interpreter name, major and minor version in cache key + # to cope with ill-behaved sdists that build a different wheel + # depending on the python version their setup.py is being run on, + # and don't encode the difference in compatibility tags. + # https://github.com/pypa/pip/issues/7296 + key_parts["interpreter_name"] = interpreter_name() + key_parts["interpreter_version"] = interpreter_version() + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = _hash_dict(key_parts) + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. + parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]: + can_not_cache = not self.cache_dir or not canonical_package_name or not link + if can_not_cache: + return [] + + formats = self.format_control.get_allowed_formats(canonical_package_name) + if not self.allowed_formats.intersection(formats): + return [] + + candidates = [] + path = self.get_path_for_link(link) + if os.path.isdir(path): + for candidate in os.listdir(path): + candidates.append((candidate, path)) + return candidates + + def get_path_for_link(self, link: Link) -> str: + """Return a directory to store cached items in for link.""" + raise NotImplementedError() + + def get( + self, + link: Link, + package_name: Optional[str], + supported_tags: List[Tag], + ) -> Link: + """Returns a link to a cached item if it exists, otherwise returns the + passed link. + """ + raise NotImplementedError() + + +class SimpleWheelCache(Cache): + """A cache of wheels for future installs.""" + + def __init__(self, cache_dir: str, format_control: FormatControl) -> None: + super().__init__(cache_dir, format_control, {"binary"}) + + def get_path_for_link(self, link: Link) -> str: + """Return a directory to store cached wheels for link + + Because there are M wheels for any one sdist, we provide a directory + to cache them in, and then consult that directory when looking up + cache hits. 
+ + We only insert things into the cache if they have plausible version + numbers, so that we don't contaminate the cache with things that were + not unique. E.g. ./package might have dozens of installs done for it + and build a version of 0.0...and if we built and cached a wheel, we'd + end up using the same wheel even if the source has been edited. + + :param link: The link of the sdist for which this will cache wheels. + """ + parts = self._get_cache_path_parts(link) + assert self.cache_dir + # Store wheels within the root cache_dir + return os.path.join(self.cache_dir, "wheels", *parts) + + def get( + self, + link: Link, + package_name: Optional[str], + supported_tags: List[Tag], + ) -> Link: + candidates = [] + + if not package_name: + return link + + canonical_package_name = canonicalize_name(package_name) + for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name): + try: + wheel = Wheel(wheel_name) + except InvalidWheelFilename: + continue + if canonicalize_name(wheel.name) != canonical_package_name: + logger.debug( + "Ignoring cached wheel %s for %s as it " + "does not match the expected distribution name %s.", + wheel_name, + link, + package_name, + ) + continue + if not wheel.supported(supported_tags): + # Built for a different python/arch/etc + continue + candidates.append( + ( + wheel.support_index_min(supported_tags), + wheel_name, + wheel_dir, + ) + ) + + if not candidates: + return link + + _, wheel_name, wheel_dir = min(candidates) + return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) + + +class EphemWheelCache(SimpleWheelCache): + """A SimpleWheelCache that creates it's own temporary cache directory""" + + def __init__(self, format_control: FormatControl) -> None: + self._temp_dir = TempDirectory( + kind=tempdir_kinds.EPHEM_WHEEL_CACHE, + globally_managed=True, + ) + + super().__init__(self._temp_dir.path, format_control) + + +class CacheEntry: + def __init__( + self, + link: Link, + persistent: bool, + ): + self.link = link + self.persistent = persistent + + +class WheelCache(Cache): + """Wraps EphemWheelCache and SimpleWheelCache into a single Cache + + This Cache allows for gracefully degradation, using the ephem wheel cache + when a certain link is not found in the simple wheel cache first. + """ + + def __init__(self, cache_dir: str, format_control: FormatControl) -> None: + super().__init__(cache_dir, format_control, {"binary"}) + self._wheel_cache = SimpleWheelCache(cache_dir, format_control) + self._ephem_cache = EphemWheelCache(format_control) + + def get_path_for_link(self, link: Link) -> str: + return self._wheel_cache.get_path_for_link(link) + + def get_ephem_path_for_link(self, link: Link) -> str: + return self._ephem_cache.get_path_for_link(link) + + def get( + self, + link: Link, + package_name: Optional[str], + supported_tags: List[Tag], + ) -> Link: + cache_entry = self.get_cache_entry(link, package_name, supported_tags) + if cache_entry is None: + return link + return cache_entry.link + + def get_cache_entry( + self, + link: Link, + package_name: Optional[str], + supported_tags: List[Tag], + ) -> Optional[CacheEntry]: + """Returns a CacheEntry with a link to a cached item if it exists or + None. The cache entry indicates if the item was found in the persistent + or ephemeral cache. 
+ """ + retval = self._wheel_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=True) + + retval = self._ephem_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=False) + + return None diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__init__.py new file mode 100644 index 0000000..e589bb9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__init__.py @@ -0,0 +1,4 @@ +"""Subpackage containing all of pip's command line interface related code +""" + +# This file intentionally does not import submodules diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..f337d09 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc new file mode 100644 index 0000000..c37fccc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-39.pyc new file mode 100644 index 0000000..888a4cd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc new file mode 100644 index 0000000..812088b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-39.pyc new file mode 100644 index 0000000..bf1e880 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/main.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/main.cpython-39.pyc new file mode 100644 index 0000000..559ba04 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/main.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc new file mode 100644 index 0000000..210a863 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/parser.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/parser.cpython-39.pyc new file mode 100644 index 0000000..e6683de Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/parser.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc new file mode 100644 index 0000000..6fe111e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-39.pyc new file mode 100644 index 0000000..797fc98 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-39.pyc new file mode 100644 index 0000000..364b8bd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc new file mode 100644 index 0000000..04e91f0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/autocompletion.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/autocompletion.py new file mode 100644 index 0000000..3cad148 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/autocompletion.py @@ -0,0 +1,163 @@ +"""Logic that powers autocompletion installed by ``pip completion``. +""" + +import optparse +import os +import sys +from itertools import chain +from typing import Any, Iterable, List, Optional + +from pip._internal.cli.main_parser import create_main_parser +from pip._internal.commands import commands_dict, create_command +from pip._internal.metadata import get_default_environment + + +def autocomplete() -> None: + """Entry Point for completion of main and subcommand options.""" + # Don't complete if user hasn't sourced bash_completion file. 
+ if "PIP_AUTO_COMPLETE" not in os.environ: + return + cwords = os.environ["COMP_WORDS"].split()[1:] + cword = int(os.environ["COMP_CWORD"]) + try: + current = cwords[cword - 1] + except IndexError: + current = "" + + parser = create_main_parser() + subcommands = list(commands_dict) + options = [] + + # subcommand + subcommand_name: Optional[str] = None + for word in cwords: + if word in subcommands: + subcommand_name = word + break + # subcommand options + if subcommand_name is not None: + # special case: 'help' subcommand has no options + if subcommand_name == "help": + sys.exit(1) + # special case: list locally installed dists for show and uninstall + should_list_installed = not current.startswith("-") and subcommand_name in [ + "show", + "uninstall", + ] + if should_list_installed: + env = get_default_environment() + lc = current.lower() + installed = [ + dist.canonical_name + for dist in env.iter_installed_distributions(local_only=True) + if dist.canonical_name.startswith(lc) + and dist.canonical_name not in cwords[1:] + ] + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + subcommand = create_command(subcommand_name) + + for opt in subcommand.parser.option_list_all: + if opt.help != optparse.SUPPRESS_HELP: + for opt_str in opt._long_opts + opt._short_opts: + options.append((opt_str, opt.nargs)) + + # filter out previously specified options from available options + prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + # get completion type given cwords and available subcommand options + completion_type = get_path_completion_type( + cwords, + cword, + subcommand.parser.option_list_all, + ) + # get completion files and directories if ``completion_type`` is + # ````, ```` or ```` + if completion_type: + paths = auto_complete_paths(current, completion_type) + options = [(path, 0) for path in paths] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1] and option[0][:2] == "--": + opt_label += "=" + print(opt_label) + else: + # show main parser options only when necessary + + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + flattened_opts = chain.from_iterable(opts) + if current.startswith("-"): + for opt in flattened_opts: + if opt.help != optparse.SUPPRESS_HELP: + subcommands += opt._long_opts + opt._short_opts + else: + # get completion type given cwords and all available options + completion_type = get_path_completion_type(cwords, cword, flattened_opts) + if completion_type: + subcommands = list(auto_complete_paths(current, completion_type)) + + print(" ".join([x for x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def get_path_completion_type( + cwords: List[str], cword: int, opts: Iterable[Any] +) -> Optional[str]: + """Get the type of path completion (``file``, ``dir``, ``path`` or None) + + :param cwords: same as the environmental variable ``COMP_WORDS`` + :param cword: same as the environmental variable ``COMP_CWORD`` + :param opts: The available options to check + :return: path completion type (``file``, ``dir``, ``path`` or None) + """ + if cword < 2 or not cwords[cword - 2].startswith("-"): + return None + for opt in opts: + if opt.help == optparse.SUPPRESS_HELP: + continue + for o in str(opt).split("/"): + if 
cwords[cword - 2].split("=")[0] == o: + if not opt.metavar or any( + x in ("path", "file", "dir") for x in opt.metavar.split("/") + ): + return opt.metavar + return None + + +def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]: + """If ``completion_type`` is ``file`` or ``path``, list all regular files + and directories starting with ``current``; otherwise only list directories + starting with ``current``. + + :param current: The word to be completed + :param completion_type: path completion type (``file``, ``path`` or ``dir``) + :return: A generator of regular files and/or directories + """ + directory, filename = os.path.split(current) + current_path = os.path.abspath(directory) + # Don't complete paths if they can't be accessed + if not os.access(current_path, os.R_OK): + return + filename = os.path.normcase(filename) + # list all files that start with ``filename`` + file_list = ( + x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename) + ) + for f in file_list: + opt = os.path.join(current_path, f) + comp_file = os.path.normcase(os.path.join(directory, f)) + # complete regular files when there is not <dir> after option + # complete directories when there is <file>, <dir> or + # <path> after option + if completion_type != "dir" and os.path.isfile(opt): + yield comp_file + elif os.path.isdir(opt): + yield os.path.join(comp_file, "") diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/base_command.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/base_command.py new file mode 100644 index 0000000..0afe7e7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/base_command.py @@ -0,0 +1,214 @@ +"""Base Command class, and related routines""" + +import functools +import logging +import logging.config +import optparse +import os +import sys +import traceback +from optparse import Values +from typing import Any, Callable, List, Optional, Tuple + +from pip._internal.cli import cmdoptions +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.cli.status_codes import ( + ERROR, + PREVIOUS_BUILD_DIR_ERROR, + UNKNOWN_ERROR, + VIRTUALENV_NOT_FOUND, +) +from pip._internal.exceptions import ( + BadCommand, + CommandError, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + UninstallationError, +) +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging +from pip._internal.utils.misc import get_prog, normalize_path +from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry +from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry +from pip._internal.utils.virtualenv import running_under_virtualenv + +__all__ = ["Command"] + +logger = logging.getLogger(__name__) + + +class Command(CommandContextMixIn): + usage: str = "" + ignore_require_venv: bool = False + + def __init__(self, name: str, summary: str, isolated: bool = False) -> None: + super().__init__() + + self.name = name + self.summary = summary + self.parser = ConfigOptionParser( + usage=self.usage, + prog=f"{get_prog()} {name}", + formatter=UpdatingDefaultsHelpFormatter(), + add_help_option=False, + name=name, + description=self.__doc__, + isolated=isolated, + ) + + self.tempdir_registry: Optional[TempDirRegistry] = None + + # Commands should add options to this option group + optgroup_name =
f"{self.name.capitalize()} Options" + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group( + cmdoptions.general_group, + self.parser, + ) + self.parser.add_option_group(gen_opts) + + self.add_options() + + def add_options(self) -> None: + pass + + def handle_pip_version_check(self, options: Values) -> None: + """ + This is a no-op so that commands by default do not do the pip version + check. + """ + # Make sure we do the pip version check if the index_group options + # are present. + assert not hasattr(options, "no_index") + + def run(self, options: Values, args: List[str]) -> int: + raise NotImplementedError + + def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]: + # factored out for testability + return self.parser.parse_args(args) + + def main(self, args: List[str]) -> int: + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args: List[str]) -> int: + # We must initialize this before the tempdir manager, otherwise the + # configuration would not be accessible by the time we clean up the + # tempdir manager. + self.tempdir_registry = self.enter_context(tempdir_registry()) + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + + options, args = self.parse_args(args) + + # Set verbosity so that it can be used elsewhere. + self.verbosity = options.verbose - options.quiet + + level_number = setup_logging( + verbosity=self.verbosity, + no_color=options.no_color, + user_log_file=options.log, + ) + + # TODO: Try to get these passing down from the command? + # without resorting to os.environ to hold these. + # This also affects isolated builds and it should. + + if options.no_input: + os.environ["PIP_NO_INPUT"] = "1" + + if options.exists_action: + os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action) + + if options.require_venv and not self.ignore_require_venv: + # If a venv is required check if it can really be found + if not running_under_virtualenv(): + logger.critical("Could not find an activated virtualenv (required).") + sys.exit(VIRTUALENV_NOT_FOUND) + + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. If executing pip with sudo, you should " + "use sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + if "2020-resolver" in options.features_enabled: + logger.warning( + "--use-feature=2020-resolver no longer has any effect, " + "since it is now the default dependency resolver in pip. " + "This will become an error in pip 21.0." 
+ ) + + def intercepts_unhandled_exc( + run_func: Callable[..., int] + ) -> Callable[..., int]: + @functools.wraps(run_func) + def exc_logging_wrapper(*args: Any) -> int: + try: + status = run_func(*args) + assert isinstance(status, int) + return status + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except ( + InstallationError, + UninstallationError, + BadCommand, + NetworkConnectionError, + ) as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical("%s", exc) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BrokenStdoutLoggingError: + # Bypass our logger and write any remaining messages to + # stderr because stdout no longer works. + print("ERROR: Pipe to stdout was broken", file=sys.stderr) + if level_number <= logging.DEBUG: + traceback.print_exc(file=sys.stderr) + + return ERROR + except KeyboardInterrupt: + logger.critical("Operation cancelled by user") + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BaseException: + logger.critical("Exception:", exc_info=True) + + return UNKNOWN_ERROR + + return exc_logging_wrapper + + try: + if not options.debug_mode: + run = intercepts_unhandled_exc(self.run) + else: + run = self.run + return run(options, args) + finally: + self.handle_pip_version_check(options) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/cmdoptions.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/cmdoptions.py new file mode 100644 index 0000000..626fd00 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/cmdoptions.py @@ -0,0 +1,1010 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import os +import textwrap +import warnings +from functools import partial +from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values +from textwrap import dedent +from typing import Any, Callable, Dict, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.parser import ConfigOptionParser +from pip._internal.cli.progress_bars import BAR_TYPES +from pip._internal.exceptions import CommandError +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix +from pip._internal.models.format_control import FormatControl +from pip._internal.models.index import PyPI +from pip._internal.models.target_python import TargetPython +from pip._internal.utils.hashes import STRONG_HASHES +from pip._internal.utils.misc import strtobool + + +def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None: + """ + Raise an option parsing error using parser.error(). + + Args: + parser: an OptionParser instance. + option: an Option instance. + msg: the error text. 
+ """ + msg = f"{option} error: {msg}" + msg = textwrap.fill(" ".join(msg.split())) + parser.error(msg) + + +def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup: + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group["name"]) + for option in group["options"]: + option_group.add_option(option()) + return option_group + + +def check_install_build_global( + options: Values, check_options: Optional[Values] = None +) -> None: + """Disable wheels if per-setup.py call options are set. + + :param options: The OptionParser options to update. + :param check_options: The options to check, if not supplied defaults to + options. + """ + if check_options is None: + check_options = options + + def getname(n: str) -> Optional[Any]: + return getattr(check_options, n, None) + + names = ["build_options", "global_options", "install_options"] + if any(map(getname, names)): + control = options.format_control + control.disallow_binaries() + warnings.warn( + "Disabling all use of wheels due to the use of --build-option " + "/ --global-option / --install-option.", + stacklevel=2, + ) + + +def check_dist_restriction(options: Values, check_target: bool = False) -> None: + """Function for determining if custom platform options are allowed. + + :param options: The OptionParser options. + :param check_target: Whether or not to check if --target is being used. + """ + dist_restriction_set = any( + [ + options.python_version, + options.platforms, + options.abis, + options.implementation, + ] + ) + + binary_only = FormatControl(set(), {":all:"}) + sdist_dependencies_allowed = ( + options.format_control != binary_only and not options.ignore_dependencies + ) + + # Installations or downloads using dist restrictions must not combine + # source distributions and dist-specific wheels, as they are not + # guaranteed to be locally compatible. + if dist_restriction_set and sdist_dependencies_allowed: + raise CommandError( + "When restricting platform and interpreter constraints using " + "--python-version, --platform, --abi, or --implementation, " + "either --no-deps must be set, or --only-binary=:all: must be " + "set and --no-binary must not be set (or must be set to " + ":none:)." + ) + + if check_target: + if dist_restriction_set and not options.target_dir: + raise CommandError( + "Can not use any platform or abi specific options unless " + "installing via '--target'" + ) + + +def _path_option_check(option: Option, opt: str, value: str) -> str: + return os.path.expanduser(value) + + +def _package_name_option_check(option: Option, opt: str, value: str) -> str: + return canonicalize_name(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path", "package_name") + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["package_name"] = _package_name_option_check + TYPE_CHECKER["path"] = _path_option_check + + +########### +# options # +########### + +help_: Callable[..., Option] = partial( + Option, + "-h", + "--help", + dest="help", + action="help", + help="Show help.", +) + +debug_mode: Callable[..., Option] = partial( + Option, + "--debug", + dest="debug_mode", + action="store_true", + default=False, + help=( + "Let unhandled exceptions propagate outside the main subroutine, " + "instead of logging them to stderr." 
+ ), +) + +isolated_mode: Callable[..., Option] = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." + ), +) + +require_virtualenv: Callable[..., Option] = partial( + Option, + # Run only if inside a virtualenv, bail if not. + "--require-virtualenv", + "--require-venv", + dest="require_venv", + action="store_true", + default=False, + help=SUPPRESS_HELP, +) + +verbose: Callable[..., Option] = partial( + Option, + "-v", + "--verbose", + dest="verbose", + action="count", + default=0, + help="Give more output. Option is additive, and can be used up to 3 times.", +) + +no_color: Callable[..., Option] = partial( + Option, + "--no-color", + dest="no_color", + action="store_true", + default=False, + help="Suppress colored output.", +) + +version: Callable[..., Option] = partial( + Option, + "-V", + "--version", + dest="version", + action="store_true", + help="Show version and exit.", +) + +quiet: Callable[..., Option] = partial( + Option, + "-q", + "--quiet", + dest="quiet", + action="count", + default=0, + help=( + "Give less output. Option is additive, and can be used up to 3" + " times (corresponding to WARNING, ERROR, and CRITICAL logging" + " levels)." + ), +) + +progress_bar: Callable[..., Option] = partial( + Option, + "--progress-bar", + dest="progress_bar", + type="choice", + choices=list(BAR_TYPES.keys()), + default="on", + help=( + "Specify type of progress to be displayed [" + + "|".join(BAR_TYPES.keys()) + + "] (default: %default)" + ), +) + +log: Callable[..., Option] = partial( + PipOption, + "--log", + "--log-file", + "--local-log", + dest="log", + metavar="path", + type="path", + help="Path to a verbose appending log.", +) + +no_input: Callable[..., Option] = partial( + Option, + # Don't ask for input + "--no-input", + dest="no_input", + action="store_true", + default=False, + help="Disable prompting for input.", +) + +proxy: Callable[..., Option] = partial( + Option, + "--proxy", + dest="proxy", + type="str", + default="", + help="Specify a proxy in the form [user:passwd@]proxy.server:port.", +) + +retries: Callable[..., Option] = partial( + Option, + "--retries", + dest="retries", + type="int", + default=5, + help="Maximum number of retries each connection should attempt " + "(default %default times).", +) + +timeout: Callable[..., Option] = partial( + Option, + "--timeout", + "--default-timeout", + metavar="sec", + dest="timeout", + type="float", + default=15, + help="Set the socket timeout (default %default seconds).", +) + + +def exists_action() -> Option: + return Option( + # Option when path already exist + "--exists-action", + dest="exists_action", + type="choice", + choices=["s", "i", "w", "b", "a"], + default=[], + action="append", + metavar="action", + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", + ) + + +cert: Callable[..., Option] = partial( + PipOption, + "--cert", + dest="cert", + type="path", + metavar="path", + help=( + "Path to PEM-encoded CA certificate bundle. " + "If provided, overrides the default. " + "See 'SSL Certificate Verification' in pip documentation " + "for more information." 
+ ), +) + +client_cert: Callable[..., Option] = partial( + PipOption, + "--client-cert", + dest="client_cert", + type="path", + default=None, + metavar="path", + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.", +) + +index_url: Callable[..., Option] = partial( + Option, + "-i", + "--index-url", + "--pypi-url", + dest="index_url", + metavar="URL", + default=PyPI.simple_url, + help="Base URL of the Python Package Index (default %default). " + "This should point to a repository compliant with PEP 503 " + "(the simple repository API) or a local directory laid out " + "in the same format.", +) + + +def extra_index_url() -> Option: + return Option( + "--extra-index-url", + dest="extra_index_urls", + metavar="URL", + action="append", + default=[], + help="Extra URLs of package indexes to use in addition to " + "--index-url. Should follow the same rules as " + "--index-url.", + ) + + +no_index: Callable[..., Option] = partial( + Option, + "--no-index", + dest="no_index", + action="store_true", + default=False, + help="Ignore package index (only looking at --find-links URLs instead).", +) + + +def find_links() -> Option: + return Option( + "-f", + "--find-links", + dest="find_links", + action="append", + default=[], + metavar="url", + help="If a URL or path to an html file, then parse for links to " + "archives such as sdist (.tar.gz) or wheel (.whl) files. " + "If a local path or file:// URL that's a directory, " + "then look for archives in the directory listing. " + "Links to VCS project URLs are not supported.", + ) + + +def trusted_host() -> Option: + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host or host:port pair as trusted, even though it " + "does not have valid or any HTTPS.", + ) + + +def constraints() -> Option: + return Option( + "-c", + "--constraint", + dest="constraints", + action="append", + default=[], + metavar="file", + help="Constrain versions using the given constraints file. " + "This option can be used multiple times.", + ) + + +def requirements() -> Option: + return Option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help="Install from the given requirements file. " + "This option can be used multiple times.", + ) + + +def editable() -> Option: + return Option( + "-e", + "--editable", + dest="editables", + action="append", + default=[], + metavar="path/url", + help=( + "Install a project in editable mode (i.e. setuptools " + '"develop mode") from a local project path or a VCS url.' + ), + ) + + +def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None: + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + +src: Callable[..., Option] = partial( + PipOption, + "--src", + "--source", + "--source-dir", + "--source-directory", + dest="src_dir", + type="path", + metavar="dir", + default=get_src_prefix(), + action="callback", + callback=_handle_src, + help="Directory to check out editable projects into. " + 'The default in a virtualenv is "<venv path>/src". 
' + 'The default for global installs is "<current dir>/src".', +) + + +def _get_format_control(values: Values, option: Option) -> Any: + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.no_binary, + existing.only_binary, + ) + + +def _handle_only_binary( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.only_binary, + existing.no_binary, + ) + + +def no_binary() -> Option: + format_control = FormatControl(set(), set()) + return Option( + "--no-binary", + dest="format_control", + action="callback", + callback=_handle_no_binary, + type="str", + default=format_control, + help="Do not use binary packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all binary packages, ":none:" to empty the set (notice ' + "the colons), or one or more package names with commas between " + "them (no colons). Note that some packages are tricky to compile " + "and may fail to install when this option is used on them.", + ) + + +def only_binary() -> Option: + format_control = FormatControl(set(), set()) + return Option( + "--only-binary", + dest="format_control", + action="callback", + callback=_handle_only_binary, + type="str", + default=format_control, + help="Do not use source packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all source packages, ":none:" to empty the set, or one ' + "or more package names with commas between them. Packages " + "without binary distributions will fail to install when this " + "option is used on them.", + ) + + +platforms: Callable[..., Option] = partial( + Option, + "--platform", + dest="platforms", + metavar="platform", + action="append", + default=None, + help=( + "Only use wheels compatible with <platform>. Defaults to the " + "platform of the running system. Use this option multiple times to " + "specify multiple platforms supported by the target interpreter." + ), +) + + +# This was made a separate function for unit-testing purposes. +def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]: + """ + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. + + :return: A 2-tuple (version_info, error_msg), where `error_msg` is + non-None if and only if there was a parsing error. + """ + if not value: + # The empty string is the same as not providing a value. + return (None, None) + + parts = value.split(".") + if len(parts) > 3: + return ((), "at most three version parts are allowed") + + if len(parts) == 1: + # Then we are in the case of "3" or "37". + value = parts[0] + if len(value) > 1: + parts = [value[0], value[1:]] + + try: + version_info = tuple(int(part) for part in parts) + except ValueError: + return ((), "each version part must be an integer") + + return (version_info, None) + + +def _handle_python_version( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + """ + Handle a provided --python-version value. 
+ """ + version_info, error_msg = _convert_python_version(value) + if error_msg is not None: + msg = "invalid --python-version value: {!r}: {}".format( + value, + error_msg, + ) + raise_option_error(parser, option=option, msg=msg) + + parser.values.python_version = version_info + + +python_version: Callable[..., Option] = partial( + Option, + "--python-version", + dest="python_version", + metavar="python_version", + action="callback", + callback=_handle_python_version, + type="str", + default=None, + help=dedent( + """\ + The Python interpreter version to use for wheel and "Requires-Python" + compatibility checks. Defaults to a version derived from the running + interpreter. The version can be specified using up to three dot-separated + integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor + version can also be given as a string without dots (e.g. "37" for 3.7.0). + """ + ), +) + + +implementation: Callable[..., Option] = partial( + Option, + "--implementation", + dest="implementation", + metavar="implementation", + default=None, + help=( + "Only use wheels compatible with Python " + "implementation , e.g. 'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current " + "interpreter implementation is used. Use 'py' to force " + "implementation-agnostic wheels." + ), +) + + +abis: Callable[..., Option] = partial( + Option, + "--abi", + dest="abis", + metavar="abi", + action="append", + default=None, + help=( + "Only use wheels compatible with Python abi , e.g. 'pypy_41'. " + "If not specified, then the current interpreter abi tag is used. " + "Use this option multiple times to specify multiple abis supported " + "by the target interpreter. Generally you will need to specify " + "--implementation, --platform, and --python-version when using this " + "option." + ), +) + + +def add_target_python_options(cmd_opts: OptionGroup) -> None: + cmd_opts.add_option(platforms()) + cmd_opts.add_option(python_version()) + cmd_opts.add_option(implementation()) + cmd_opts.add_option(abis()) + + +def make_target_python(options: Values) -> TargetPython: + target_python = TargetPython( + platforms=options.platforms, + py_version_info=options.python_version, + abis=options.abis, + implementation=options.implementation, + ) + + return target_python + + +def prefer_binary() -> Option: + return Option( + "--prefer-binary", + dest="prefer_binary", + action="store_true", + default=False, + help="Prefer older binary packages over newer source packages.", + ) + + +cache_dir: Callable[..., Option] = partial( + PipOption, + "--cache-dir", + dest="cache_dir", + default=USER_CACHE_DIR, + metavar="dir", + type="path", + help="Store the cache data in .", +) + + +def _handle_no_cache_dir( + option: Option, opt: str, value: str, parser: OptionParser +) -> None: + """ + Process a value provided for the --no-cache-dir option. + + This is an optparse.Option callback for the --no-cache-dir option. + """ + # The value argument will be None if --no-cache-dir is passed via the + # command-line, since the option doesn't accept arguments. However, + # the value can be non-None if the option is triggered e.g. by an + # environment variable, like PIP_NO_CACHE_DIR=true. + if value is not None: + # Then parse the string value to get argument error-checking. 
+ try: + strtobool(value) + except ValueError as exc: + raise_option_error(parser, option=option, msg=str(exc)) + + # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() + # converted to 0 (like "false" or "no") caused cache_dir to be disabled + # rather than enabled (logic would say the latter). Thus, we disable + # the cache directory not just on values that parse to True, but (for + # backwards compatibility reasons) also on values that parse to False. + # In other words, always set it to False if the option is provided in + # some (valid) form. + parser.values.cache_dir = False + + +no_cache: Callable[..., Option] = partial( + Option, + "--no-cache-dir", + dest="cache_dir", + action="callback", + callback=_handle_no_cache_dir, + help="Disable the cache.", +) + +no_deps: Callable[..., Option] = partial( + Option, + "--no-deps", + "--no-dependencies", + dest="ignore_dependencies", + action="store_true", + default=False, + help="Don't install package dependencies.", +) + +ignore_requires_python: Callable[..., Option] = partial( + Option, + "--ignore-requires-python", + dest="ignore_requires_python", + action="store_true", + help="Ignore the Requires-Python information.", +) + +no_build_isolation: Callable[..., Option] = partial( + Option, + "--no-build-isolation", + dest="build_isolation", + action="store_false", + default=True, + help="Disable isolation when building a modern source distribution. " + "Build dependencies specified by PEP 518 must be already installed " + "if this option is used.", +) + + +def _handle_no_use_pep517( + option: Option, opt: str, value: str, parser: OptionParser +) -> None: + """ + Process a value provided for the --no-use-pep517 option. + + This is an optparse.Option callback for the no_use_pep517 option. + """ + # Since --no-use-pep517 doesn't accept arguments, the value argument + # will be None if --no-use-pep517 is passed via the command-line. + # However, the value can be non-None if the option is triggered e.g. + # by an environment variable, for example "PIP_NO_USE_PEP517=true". + if value is not None: + msg = """A value was passed for --no-use-pep517, + probably using either the PIP_NO_USE_PEP517 environment variable + or the "no-use-pep517" config file option. Use an appropriate value + of the PIP_USE_PEP517 environment variable or the "use-pep517" + config file option instead. + """ + raise_option_error(parser, option=option, msg=msg) + + # Otherwise, --no-use-pep517 was passed via the command-line. + parser.values.use_pep517 = False + + +use_pep517: Any = partial( + Option, + "--use-pep517", + dest="use_pep517", + action="store_true", + default=None, + help="Use PEP 517 for building source distributions " + "(use --no-use-pep517 to force legacy behaviour).", +) + +no_use_pep517: Any = partial( + Option, + "--no-use-pep517", + dest="use_pep517", + action="callback", + callback=_handle_no_use_pep517, + default=None, + help=SUPPRESS_HELP, +) + +install_options: Callable[..., Option] = partial( + Option, + "--install-option", + dest="install_options", + action="append", + metavar="options", + help="Extra arguments to be supplied to the setup.py install " + 'command (use like --install-option="--install-scripts=/usr/local/' + 'bin"). Use multiple --install-option options to pass multiple ' + "options to setup.py install. 
If you are using an option with a " + "directory path, be sure to use absolute path.", +) + +build_options: Callable[..., Option] = partial( + Option, + "--build-option", + dest="build_options", + metavar="options", + action="append", + help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", +) + +global_options: Callable[..., Option] = partial( + Option, + "--global-option", + dest="global_options", + action="append", + metavar="options", + help="Extra global options to be supplied to the setup.py " + "call before the install or bdist_wheel command.", +) + +no_clean: Callable[..., Option] = partial( + Option, + "--no-clean", + action="store_true", + default=False, + help="Don't clean up build directories.", +) + +pre: Callable[..., Option] = partial( + Option, + "--pre", + action="store_true", + default=False, + help="Include pre-release and development versions. By default, " + "pip only finds stable versions.", +) + +disable_pip_version_check: Callable[..., Option] = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=False, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.", +) + + +def _handle_merge_hash( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(":", 1) + except ValueError: + parser.error( + "Arguments to {} must be a hash name " # noqa + "followed by a value, like --hash=sha256:" + "abcde...".format(opt_str) + ) + if algo not in STRONG_HASHES: + parser.error( + "Allowed hash algorithms for {} are {}.".format( # noqa + opt_str, ", ".join(STRONG_HASHES) + ) + ) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash: Callable[..., Option] = partial( + Option, + "--hash", + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest="hashes", + action="callback", + callback=_handle_merge_hash, + type="string", + help="Verify that the package's archive matches this " + "hash before installing. Example: --hash=sha256:abcdef...", +) + + +require_hashes: Callable[..., Option] = partial( + Option, + "--require-hashes", + dest="require_hashes", + action="store_true", + default=False, + help="Require a hash to check each requirement against, for " + "repeatable installs. 
This option is implied when any package in a " + "requirements file has a --hash option.", +) + + +list_path: Callable[..., Option] = partial( + PipOption, + "--path", + dest="path", + type="path", + action="append", + help="Restrict to the specified installation path for listing " + "packages (can be used multiple times).", +) + + +def check_list_path_option(options: Values) -> None: + if options.path and (options.user or options.local): + raise CommandError("Cannot combine '--path' with '--user' or '--local'") + + +list_exclude: Callable[..., Option] = partial( + PipOption, + "--exclude", + dest="excludes", + action="append", + metavar="package", + type="package_name", + help="Exclude specified package from the output", +) + + +no_python_version_warning: Callable[..., Option] = partial( + Option, + "--no-python-version-warning", + dest="no_python_version_warning", + action="store_true", + default=False, + help="Silence deprecation warnings for upcoming unsupported Pythons.", +) + + +use_new_feature: Callable[..., Option] = partial( + Option, + "--use-feature", + dest="features_enabled", + metavar="feature", + action="append", + default=[], + choices=["2020-resolver", "fast-deps", "in-tree-build"], + help="Enable new functionality, that may be backward incompatible.", +) + +use_deprecated_feature: Callable[..., Option] = partial( + Option, + "--use-deprecated", + dest="deprecated_features_enabled", + metavar="feature", + action="append", + default=[], + choices=["legacy-resolver", "out-of-tree-build"], + help=("Enable deprecated functionality, that will be removed in the future."), +) + + +########## +# groups # +########## + +general_group: Dict[str, Any] = { + "name": "General Options", + "options": [ + help_, + debug_mode, + isolated_mode, + require_virtualenv, + verbose, + version, + quiet, + log, + no_input, + proxy, + retries, + timeout, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + no_color, + no_python_version_warning, + use_new_feature, + use_deprecated_feature, + ], +} + +index_group: Dict[str, Any] = { + "name": "Package Index Options", + "options": [ + index_url, + extra_index_url, + no_index, + find_links, + ], +} diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/command_context.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/command_context.py new file mode 100644 index 0000000..ed68322 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/command_context.py @@ -0,0 +1,27 @@ +from contextlib import ExitStack, contextmanager +from typing import ContextManager, Iterator, TypeVar + +_T = TypeVar("_T", covariant=True) + + +class CommandContextMixIn: + def __init__(self) -> None: + super().__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self) -> Iterator[None]: + assert not self._in_main_context + + self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider: ContextManager[_T]) -> _T: + assert self._in_main_context + + return self._main_context.enter_context(context_provider) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/main.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/main.py new file mode 100644 index 0000000..0e31221 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/main.py @@ -0,0 +1,70 @@ +"""Primary application entrypoint. 
+""" +import locale +import logging +import os +import sys +from typing import List, Optional + +from pip._internal.cli.autocompletion import autocomplete +from pip._internal.cli.main_parser import parse_command +from pip._internal.commands import create_command +from pip._internal.exceptions import PipError +from pip._internal.utils import deprecation + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. + + +def main(args: Optional[List[str]] = None) -> int: + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write(f"ERROR: {exc}") + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, "") + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/main_parser.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/main_parser.py new file mode 100644 index 0000000..3666ab0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/main_parser.py @@ -0,0 +1,87 @@ +"""A single place for constructing and exposing the main parser +""" + +import os +import sys +from typing import List, Tuple + +from pip._internal.cli import cmdoptions +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.commands import commands_dict, get_similar_commands +from pip._internal.exceptions import CommandError +from pip._internal.utils.misc import get_pip_version, get_prog + +__all__ = ["create_main_parser", "parse_command"] + + +def create_main_parser() -> ConfigOptionParser: + """Creates and returns the main parser for pip's CLI""" + + parser = ConfigOptionParser( + usage="\n%prog [options]", + add_help_option=False, + formatter=UpdatingDefaultsHelpFormatter(), + name="global", + prog=get_prog(), + ) + parser.disable_interspersed_args() + + parser.version = 
get_pip_version() + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + # so the help formatter knows + parser.main = True # type: ignore + + # create command listing for description + description = [""] + [ + f"{name:27} {command_info.summary}" + for name, command_info in commands_dict.items() + ] + parser.description = "\n".join(description) + + return parser + + +def parse_command(args: List[str]) -> Tuple[str, List[str]]: + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout==5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == "help" and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/parser.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/parser.py new file mode 100644 index 0000000..a1c99a8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/parser.py @@ -0,0 +1,292 @@ +"""Base option parser setup""" + +import logging +import optparse +import shutil +import sys +import textwrap +from contextlib import suppress +from typing import Any, Dict, Iterator, List, Tuple + +from pip._internal.cli.status_codes import UNKNOWN_ERROR +from pip._internal.configuration import Configuration, ConfigurationError +from pip._internal.utils.misc import redact_auth_from_url, strtobool + +logger = logging.getLogger(__name__) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # help position must be aligned with __init__.parseopts.description + kwargs["max_help_position"] = 30 + kwargs["indent_increment"] = 1 + kwargs["width"] = shutil.get_terminal_size()[0] - 2 + super().__init__(*args, **kwargs) + + def format_option_strings(self, option: optparse.Option) -> str: + return self._format_option_strings(option) + + def _format_option_strings( + self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", " + ) -> str: + """ + Return a comma-separated list of option strings and metavars. 
+ + :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') + :param mvarfmt: metavar format string + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + assert option.dest is not None + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt.format(metavar.lower())) + + return "".join(opts) + + def format_heading(self, heading: str) -> str: + if heading == "Options": + return "" + return heading + ":\n" + + def format_usage(self, usage: str) -> str: + """ + Ensure there is only one newline between usage and the first heading + if there is no description. + """ + msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " ")) + return msg + + def format_description(self, description: str) -> str: + # leave full control over description to us + if description: + if hasattr(self.parser, "main"): + label = "Commands" + else: + label = "Description" + # some doc strings have initial newlines, some don't + description = description.lstrip("\n") + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = f"{label}:\n{description}\n" + return description + else: + return "" + + def format_epilog(self, epilog: str) -> str: + # leave full control over epilog to us + if epilog: + return epilog + else: + return "" + + def indent_lines(self, text: str, indent: str) -> str: + new_lines = [indent + line for line in text.split("\n")] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser. + + This is updates the defaults before expanding them, allowing + them to show up correctly in the help listing. 
+ + Also redact auth from url type options + """ + + def expand_default(self, option: optparse.Option) -> str: + default_values = None + if self.parser is not None: + assert isinstance(self.parser, ConfigOptionParser) + self.parser._update_defaults(self.parser.defaults) + assert option.dest is not None + default_values = self.parser.defaults.get(option.dest) + help_text = super().expand_default(option) + + if default_values and option.metavar == "URL": + if isinstance(default_values, str): + default_values = [default_values] + + # If its not a list, we should abort and just return the help text + if not isinstance(default_values, list): + default_values = [] + + for val in default_values: + help_text = help_text.replace(val, redact_auth_from_url(val)) + + return help_text + + +class CustomOptionParser(optparse.OptionParser): + def insert_option_group( + self, idx: int, *args: Any, **kwargs: Any + ) -> optparse.OptionGroup: + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self) -> List[optparse.Option]: + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + + +class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environmental variables""" + + def __init__( + self, + *args: Any, + name: str, + isolated: bool = False, + **kwargs: Any, + ) -> None: + self.name = name + self.config = Configuration(isolated) + + assert self.name + super().__init__(*args, **kwargs) + + def check_default(self, option: optparse.Option, key: str, val: Any) -> Any: + try: + return option.check_value(key, val) + except optparse.OptionValueError as exc: + print(f"An error occurred during configuration: {exc}") + sys.exit(3) + + def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]: + # Configuration gives keys in an unordered manner. Order them. + override_order = ["global", self.name, ":env:"] + + # Pool the options into different groups + section_items: Dict[str, List[Tuple[str, Any]]] = { + name: [] for name in override_order + } + for section_key, val in self.config.items(): + # ignore empty values + if not val: + logger.debug( + "Ignoring configuration key '%s' as it's value is empty.", + section_key, + ) + continue + + section, key = section_key.split(".", 1) + if section in override_order: + section_items[section].append((key, val)) + + # Yield each group in their override order + for section in override_order: + for key, val in section_items[section]: + yield key, val + + def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: + """Updates the given defaults with values from the config files and + the environ. Does a little special handling for certain types of + options (lists).""" + + # Accumulate complex default state. + self.values = optparse.Values(self.defaults) + late_eval = set() + # Then set the options with those values + for key, val in self._get_ordered_configuration_items(): + # '--' because configuration supports only long names + option = self.get_option("--" + key) + + # Ignore options not present in this parser. E.g. non-globals put + # in [global] by users that want them to apply to all applicable + # commands. 
+ if option is None: + continue + + assert option.dest is not None + + if option.action in ("store_true", "store_false"): + try: + val = strtobool(val) + except ValueError: + self.error( + "{} is not a valid value for {} option, " # noqa + "please specify a boolean value like yes/no, " + "true/false or 1/0 instead.".format(val, key) + ) + elif option.action == "count": + with suppress(ValueError): + val = strtobool(val) + with suppress(ValueError): + val = int(val) + if not isinstance(val, int) or val < 0: + self.error( + "{} is not a valid value for {} option, " # noqa + "please instead specify either a non-negative integer " + "or a boolean value like yes/no or false/true " + "which is equivalent to 1/0.".format(val, key) + ) + elif option.action == "append": + val = val.split() + val = [self.check_default(option, key, v) for v in val] + elif option.action == "callback": + assert option.callback is not None + late_eval.add(option.dest) + opt_str = option.get_opt_string() + val = option.convert_value(opt_str, val) + # From take_action + args = option.callback_args or () + kwargs = option.callback_kwargs or {} + option.callback(option, opt_str, val, self, *args, **kwargs) + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + + for key in late_eval: + defaults[key] = getattr(self.values, key) + self.values = None + return defaults + + def get_default_values(self) -> optparse.Values: + """Overriding to make updating the defaults after instantiation of + the option parser possible, _update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. + return optparse.Values(self.defaults) + + # Load the configuration, or error out in case of an error + try: + self.config.load() + except ConfigurationError as err: + self.exit(UNKNOWN_ERROR, str(err)) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + assert option.dest is not None + default = defaults.get(option.dest) + if isinstance(default, str): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg: str) -> None: + self.print_usage(sys.stderr) + self.exit(UNKNOWN_ERROR, f"{msg}\n") diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/progress_bars.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/progress_bars.py new file mode 100644 index 0000000..f3db295 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/progress_bars.py @@ -0,0 +1,250 @@ +import itertools +import sys +from signal import SIGINT, default_int_handler, signal +from typing import Any + +from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar +from pip._vendor.progress.spinner import Spinner + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.misc import format_size + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar: + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. 
+ if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", ""), + getattr(preferred, "fill", ""), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + "".join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar: Any = _select_progress_class(IncrementalBar, Bar) + + +class InterruptibleMixin: + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. It leaves its own handler in place even after an uninterrupted finish, + which will have unexpected delayed effects if the user triggers an + unrelated keyboard interrupt some time after a progress-displaying + download has already completed, for example. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """ + Save the original SIGINT handler for later. + """ + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + + self.original_handler = signal(SIGINT, self.handle_sigint) + + # If signal() returns None, the previous handler was not installed from + # Python, and we cannot restore it. This probably should not happen, + # but if it does, we must restore something sensible instead, at least. + # The least bad option should be Python's default SIGINT handler, which + # just raises KeyboardInterrupt. + if self.original_handler is None: + self.original_handler = default_int_handler + + def finish(self) -> None: + """ + Restore the original SIGINT handler after finishing. + + This should happen regardless of whether the progress display finishes + normally, or gets interrupted. + """ + super().finish() # type: ignore + signal(SIGINT, self.original_handler) + + def handle_sigint(self, signum, frame): # type: ignore + """ + Call self.finish() before delegating to the original SIGINT handler. + + This handler should only be in place while the progress display is + active. + """ + self.finish() + self.original_handler(signum, frame) + + +class SilentBar(Bar): + def update(self) -> None: + pass + + +class BlueEmojiBar(IncrementalBar): + + suffix = "%(percent)d%%" + bar_prefix = " " + bar_suffix = " " + phases = ("\U0001F539", "\U0001F537", "\U0001F535") + + +class DownloadProgressMixin: + def __init__(self, *args: Any, **kwargs: Any) -> None: + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + self.message: str = (" " * (get_indentation() + 2)) + self.message + + @property + def downloaded(self) -> str: + return format_size(self.index) # type: ignore + + @property + def download_speed(self) -> str: + # Avoid zero division errors... + if self.avg == 0.0: # type: ignore + return "..." 
+ return format_size(1 / self.avg) + "/s" # type: ignore + + @property + def pretty_eta(self) -> str: + if self.eta: # type: ignore + return f"eta {self.eta_td}" # type: ignore + return "" + + def iter(self, it): # type: ignore + for x in it: + yield x + # B305 is incorrectly raised here + # https://github.com/PyCQA/flake8-bugbear/issues/59 + self.next(len(x)) # noqa: B305 + self.finish() + + +class WindowsMixin: + def __init__(self, *args: Any, **kwargs: Any) -> None: + # The Windows terminal does not support the hide/show cursor ANSI codes + # even with colorama. So we'll ensure that hide_cursor is False on + # Windows. + # This call needs to go before the super() call, so that hide_cursor + # is set in time. The base progress bar class writes the "hide cursor" + # code to the terminal in its init, so if we don't set this soon + # enough, we get a "hide" with no corresponding "show"... + if WINDOWS and self.hide_cursor: # type: ignore + self.hide_cursor = False + + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + + # Check if we are running on Windows and we have the colorama module, + # if we do then wrap our file with it. + if WINDOWS and colorama: + self.file = colorama.AnsiToWin32(self.file) # type: ignore + # The progress code expects to be able to call self.file.isatty() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.isatty = lambda: self.file.wrapped.isatty() + # The progress code expects to be able to call self.file.flush() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.flush = lambda: self.file.wrapped.flush() + + +class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + + +class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar): + pass + + +class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): + pass + + +class DownloadBar(BaseDownloadProgressBar, Bar): + pass + + +class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar): + pass + + +class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar): + pass + + +class DownloadProgressSpinner( + WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner +): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self) -> str: + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self) -> None: + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = "".join( + [ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ] + ) + + self.writeln(line) + + +BAR_TYPES = { + "off": (DownloadSilentBar, DownloadSilentBar), + "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), + "ascii": (DownloadBar, DownloadProgressSpinner), + "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), + "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner), +} + + +def DownloadProgressProvider(progress_bar, max=None): # type: ignore + if max is None or max == 0: + return BAR_TYPES[progress_bar][1]().iter + else: + return BAR_TYPES[progress_bar][0](max=max).iter diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/req_command.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/req_command.py new file mode 100644 
index 0000000..dbd15cb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/req_command.py @@ -0,0 +1,469 @@ +"""Contains the Command base classes that depend on PipSession. + +The classes in this module are in a separate module so the commands not +needing download / PackageFinder capability don't unnecessarily import the +PackageFinder machinery and all its vendored dependencies, etc. +""" + +import logging +import os +import sys +from functools import partial +from optparse import Values +from typing import Any, List, Optional, Tuple + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, + install_req_from_parsed_requirement, + install_req_from_req_string, +) +from pip._internal.req.req_file import parse_requirements +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.resolution.base import BaseResolver +from pip._internal.self_outdated_check import pip_self_version_check +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.temp_dir import ( + TempDirectory, + TempDirectoryTypeRegistry, + tempdir_kinds, +) +from pip._internal.utils.virtualenv import running_under_virtualenv + +logger = logging.getLogger(__name__) + + +class SessionCommandMixin(CommandContextMixIn): + + """ + A class mixin for command classes needing _build_session(). 
+ """ + + def __init__(self) -> None: + super().__init__() + self._session: Optional[PipSession] = None + + @classmethod + def _get_index_urls(cls, options: Values) -> Optional[List[str]]: + """Return a list of index urls from user-provided options.""" + index_urls = [] + if not getattr(options, "no_index", False): + url = getattr(options, "index_url", None) + if url: + index_urls.append(url) + urls = getattr(options, "extra_index_urls", None) + if urls: + index_urls.extend(urls) + # Return None rather than an empty list + return index_urls or None + + def get_default_session(self, options: Values) -> PipSession: + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None + return self._session + + def _build_session( + self, + options: Values, + retries: Optional[int] = None, + timeout: Optional[int] = None, + ) -> PipSession: + assert not options.cache_dir or os.path.isabs(options.cache_dir) + session = PipSession( + cache=( + os.path.join(options.cache_dir, "http") if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + trusted_hosts=options.trusted_hosts, + index_urls=self._get_index_urls(options), + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = timeout if timeout is not None else options.timeout + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + +class IndexGroupCommand(Command, SessionCommandMixin): + + """ + Abstract base class for commands with the index_group options. + + This also corresponds to the commands that permit the pip version check. + """ + + def handle_pip_version_check(self, options: Values) -> None: + """ + Do the pip version check if not disabled. + + This overrides the default behavior of not doing the check. + """ + # Make sure the index_group options are present. + assert hasattr(options, "no_index") + + if options.disable_pip_version_check or options.no_index: + return + + # Otherwise, check if we're using the latest version of pip available. + session = self._build_session( + options, retries=0, timeout=min(5, options.timeout) + ) + with session: + pip_self_version_check(session, options) + + +KEEPABLE_TEMPDIR_TYPES = [ + tempdir_kinds.BUILD_ENV, + tempdir_kinds.EPHEM_WHEEL_CACHE, + tempdir_kinds.REQ_BUILD, +] + + +def warn_if_run_as_root() -> None: + """Output a warning for sudo users on Unix. + + In a virtual environment, sudo pip still writes to virtualenv. + On Windows, users may run pip as Administrator without issues. + This warning only applies to Unix root users outside of virtualenv. + """ + if running_under_virtualenv(): + return + if not hasattr(os, "getuid"): + return + # On Windows, there are no "system managed" Python packages. Installing as + # Administrator via pip is the correct way of updating system environments. 
+ # + # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform + # checks: https://mypy.readthedocs.io/en/stable/common_issues.html + if sys.platform == "win32" or sys.platform == "cygwin": + return + + if os.getuid() != 0: + return + + logger.warning( + "Running pip as the 'root' user can result in broken permissions and " + "conflicting behaviour with the system package manager. " + "It is recommended to use a virtual environment instead: " + "https://pip.pypa.io/warnings/venv" + ) + + +def with_cleanup(func: Any) -> Any: + """Decorator for common logic related to managing temporary + directories. + """ + + def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None: + for t in KEEPABLE_TEMPDIR_TYPES: + registry.set_delete(t, False) + + def wrapper( + self: RequirementCommand, options: Values, args: List[Any] + ) -> Optional[int]: + assert self.tempdir_registry is not None + if options.no_clean: + configure_tempdir_registry(self.tempdir_registry) + + try: + return func(self, options, args) + except PreviousBuildDirError: + # This kind of conflict can occur when the user passes an explicit + # build directory with a pre-existing folder. In that case we do + # not want to accidentally remove it. + configure_tempdir_registry(self.tempdir_registry) + raise + + return wrapper + + +class RequirementCommand(IndexGroupCommand): + def __init__(self, *args: Any, **kw: Any) -> None: + super().__init__(*args, **kw) + + self.cmd_opts.add_option(cmdoptions.no_clean()) + + @staticmethod + def determine_resolver_variant(options: Values) -> str: + """Determines which resolver should be used, based on the given options.""" + if "legacy-resolver" in options.deprecated_features_enabled: + return "legacy" + + return "2020-resolver" + + @classmethod + def make_requirement_preparer( + cls, + temp_build_dir: TempDirectory, + options: Values, + req_tracker: RequirementTracker, + session: PipSession, + finder: PackageFinder, + use_user_site: bool, + download_dir: Optional[str] = None, + ) -> RequirementPreparer: + """ + Create a RequirementPreparer instance for the given parameters. + """ + temp_build_dir_path = temp_build_dir.path + assert temp_build_dir_path is not None + + resolver_variant = cls.determine_resolver_variant(options) + if resolver_variant == "2020-resolver": + lazy_wheel = "fast-deps" in options.features_enabled + if lazy_wheel: + logger.warning( + "pip is using lazily downloaded wheels using HTTP " + "range requests to obtain dependency information. " + "This experimental feature is enabled through " + "--use-feature=fast-deps and it is not ready for " + "production." + ) + else: + lazy_wheel = False + if "fast-deps" in options.features_enabled: + logger.warning( + "fast-deps has no effect when used with the legacy resolver." 
+ ) + + in_tree_build = "out-of-tree-build" not in options.deprecated_features_enabled + if "in-tree-build" in options.features_enabled: + deprecated( + reason="In-tree builds are now the default.", + replacement="to remove the --use-feature=in-tree-build flag", + gone_in="22.1", + ) + if "out-of-tree-build" in options.deprecated_features_enabled: + deprecated( + reason="Out-of-tree builds are deprecated.", + replacement=None, + gone_in="22.1", + ) + + return RequirementPreparer( + build_dir=temp_build_dir_path, + src_dir=options.src_dir, + download_dir=download_dir, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + session=session, + progress_bar=options.progress_bar, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, + lazy_wheel=lazy_wheel, + in_tree_build=in_tree_build, + ) + + @classmethod + def make_resolver( + cls, + preparer: RequirementPreparer, + finder: PackageFinder, + options: Values, + wheel_cache: Optional[WheelCache] = None, + use_user_site: bool = False, + ignore_installed: bool = True, + ignore_requires_python: bool = False, + force_reinstall: bool = False, + upgrade_strategy: str = "to-satisfy-only", + use_pep517: Optional[bool] = None, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> BaseResolver: + """ + Create a Resolver instance for the given parameters. + """ + make_install_req = partial( + install_req_from_req_string, + isolated=options.isolated_mode, + use_pep517=use_pep517, + ) + resolver_variant = cls.determine_resolver_variant(options) + # The long import name and duplicated invocation is needed to convince + # Mypy into correctly typechecking. Otherwise it would complain the + # "Resolver" class being redefined. + if resolver_variant == "2020-resolver": + import pip._internal.resolution.resolvelib.resolver + + return pip._internal.resolution.resolvelib.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + import pip._internal.resolution.legacy.resolver + + return pip._internal.resolution.legacy.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + + def get_requirements( + self, + args: List[str], + options: Values, + finder: PackageFinder, + session: PipSession, + ) -> List[InstallRequirement]: + """ + Parse command-line arguments into the corresponding requirements. 
+ """ + requirements: List[InstallRequirement] = [] + for filename in options.constraints: + for parsed_req in parse_requirements( + filename, + constraint=True, + finder=finder, + options=options, + session=session, + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + user_supplied=False, + ) + requirements.append(req_to_add) + + for req in args: + req_to_add = install_req_from_line( + req, + None, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + for req in options.editables: + req_to_add = install_req_from_editable( + req, + user_supplied=True, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + ) + requirements.append(req_to_add) + + # NOTE: options.require_hashes may be set if --require-hashes is True + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, finder=finder, options=options, session=session + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + # If any requirement has hash options, enable hash checking. + if any(req.has_hash_options for req in requirements): + options.require_hashes = True + + if not (args or options.editables or options.requirements): + opts = {"name": self.name} + if options.find_links: + raise CommandError( + "You must give at least one requirement to {name} " + '(maybe you meant "pip {name} {links}"?)'.format( + **dict(opts, links=" ".join(options.find_links)) + ) + ) + else: + raise CommandError( + "You must give at least one requirement to {name} " + '(see "pip help {name}")'.format(**opts) + ) + + return requirements + + @staticmethod + def trace_basic_info(finder: PackageFinder) -> None: + """ + Trace basic information about the provided objects. + """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + + def _build_package_finder( + self, + options: Values, + session: PipSession, + target_python: Optional[TargetPython] = None, + ignore_requires_python: Optional[bool] = None, + ) -> PackageFinder: + """ + Create a package finder appropriate to this requirement command. + + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. 
+ """ + link_collector = LinkCollector.create(session, options=options) + selection_prefs = SelectionPreferences( + allow_yanked=True, + format_control=options.format_control, + allow_all_prereleases=options.pre, + prefer_binary=options.prefer_binary, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/spinners.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/spinners.py new file mode 100644 index 0000000..1e313e1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/spinners.py @@ -0,0 +1,157 @@ +import contextlib +import itertools +import logging +import sys +import time +from typing import IO, Iterator + +from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation + +logger = logging.getLogger(__name__) + + +class SpinnerInterface: + def spin(self) -> None: + raise NotImplementedError() + + def finish(self, final_status: str) -> None: + raise NotImplementedError() + + +class InteractiveSpinner(SpinnerInterface): + def __init__( + self, + message: str, + file: IO[str] = None, + spin_chars: str = "-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds: float = 0.125, + ): + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status: str) -> None: + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self) -> None: + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status: str) -> None: + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. 
+class NonInteractiveSpinner(SpinnerInterface): + def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None: + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status: str) -> None: + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self) -> None: + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status: str) -> None: + if self._finished: + return + self._update(f"finished with status '{final_status}'") + self._finished = True + + +class RateLimiter: + def __init__(self, min_update_interval_seconds: float) -> None: + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update: float = 0 + + def ready(self) -> bool: + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self) -> None: + self._last_update = time.time() + + +@contextlib.contextmanager +def open_spinner(message: str) -> Iterator[SpinnerInterface]: + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. + if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner: SpinnerInterface = InteractiveSpinner(message) + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") + + +@contextlib.contextmanager +def hidden_cursor(file: IO[str]) -> Iterator[None]: + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. 
+ # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/cli/status_codes.py b/.venv/lib/python3.9/site-packages/pip/_internal/cli/status_codes.py new file mode 100644 index 0000000..5e29502 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/cli/status_codes.py @@ -0,0 +1,6 @@ +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__init__.py new file mode 100644 index 0000000..c72f24f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__init__.py @@ -0,0 +1,127 @@ +""" +Package containing all pip commands +""" + +import importlib +from collections import namedtuple +from typing import Any, Dict, Optional + +from pip._internal.cli.base_command import Command + +CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary") + +# This dictionary does a bunch of heavy lifting for help output: +# - Enables avoiding additional (costly) imports for presenting `--help`. +# - The ordering matters for help display. +# +# Even though the module path starts with the same "pip._internal.commands" +# prefix, the full path makes testing easier (specifically when modifying +# `commands_dict` in test setup / teardown). +commands_dict: Dict[str, CommandInfo] = { + "install": CommandInfo( + "pip._internal.commands.install", + "InstallCommand", + "Install packages.", + ), + "download": CommandInfo( + "pip._internal.commands.download", + "DownloadCommand", + "Download packages.", + ), + "uninstall": CommandInfo( + "pip._internal.commands.uninstall", + "UninstallCommand", + "Uninstall packages.", + ), + "freeze": CommandInfo( + "pip._internal.commands.freeze", + "FreezeCommand", + "Output installed packages in requirements format.", + ), + "list": CommandInfo( + "pip._internal.commands.list", + "ListCommand", + "List installed packages.", + ), + "show": CommandInfo( + "pip._internal.commands.show", + "ShowCommand", + "Show information about installed packages.", + ), + "check": CommandInfo( + "pip._internal.commands.check", + "CheckCommand", + "Verify installed packages have compatible dependencies.", + ), + "config": CommandInfo( + "pip._internal.commands.configuration", + "ConfigurationCommand", + "Manage local and global configuration.", + ), + "search": CommandInfo( + "pip._internal.commands.search", + "SearchCommand", + "Search PyPI for packages.", + ), + "cache": CommandInfo( + "pip._internal.commands.cache", + "CacheCommand", + "Inspect and manage pip's wheel cache.", + ), + "index": CommandInfo( + "pip._internal.commands.index", + "IndexCommand", + "Inspect information available from package indexes.", + ), + "wheel": CommandInfo( + "pip._internal.commands.wheel", + "WheelCommand", + "Build wheels from your requirements.", + ), + "hash": CommandInfo( + "pip._internal.commands.hash", + "HashCommand", + "Compute hashes of package archives.", + ), + "completion": CommandInfo( + "pip._internal.commands.completion", + "CompletionCommand", + "A helper command used for command completion.", + ), + "debug": CommandInfo( + "pip._internal.commands.debug", + "DebugCommand", + "Show information useful for debugging.", + ), + "help": 
CommandInfo( + "pip._internal.commands.help", + "HelpCommand", + "Show help for commands.", + ), +} + + +def create_command(name: str, **kwargs: Any) -> Command: + """ + Create an instance of the Command class with the given name. + """ + module_path, class_name, summary = commands_dict[name] + module = importlib.import_module(module_path) + command_class = getattr(module, class_name) + command = command_class(name=name, summary=summary, **kwargs) + + return command + + +def get_similar_commands(name: str) -> Optional[str]: + """Command name auto-correct.""" + from difflib import get_close_matches + + name = name.lower() + + close_commands = get_close_matches(name, commands_dict.keys()) + + if close_commands: + return close_commands[0] + else: + return None diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..2247ac4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc new file mode 100644 index 0000000..8b2b332 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc new file mode 100644 index 0000000..f945f5e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc new file mode 100644 index 0000000..61cdfaf Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc new file mode 100644 index 0000000..237cc9f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc new file mode 100644 index 0000000..a8b64d2 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc new file mode 100644 index 0000000..1967957 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc new file mode 100644 index 0000000..a902df4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc new file mode 100644 index 0000000..fc51363 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc new file mode 100644 index 0000000..a86b63e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc new file mode 100644 index 0000000..eb15709 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc new file mode 100644 index 0000000..a01acfe Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc new file mode 100644 index 0000000..195d5ef Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc new file mode 100644 index 0000000..e88a66f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc new file mode 100644 index 0000000..c071665 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc new file mode 100644 index 0000000..393e2cb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc new file mode 100644 index 0000000..eca8949 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc 
differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/cache.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/cache.py new file mode 100644 index 0000000..f1a489d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/cache.py @@ -0,0 +1,223 @@ +import os +import textwrap +from optparse import Values +from typing import Any, List + +import pip._internal.utils.filesystem as filesystem +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, PipError +from pip._internal.utils.logging import getLogger + +logger = getLogger(__name__) + + +class CacheCommand(Command): + """ + Inspect and manage pip's wheel cache. + + Subcommands: + + - dir: Show the cache directory. + - info: Show information about the cache. + - list: List filenames of packages stored in the cache. + - remove: Remove one or more package from the cache. + - purge: Remove all items from the cache. + + ```` can be a glob expression or a package name. + """ + + ignore_require_venv = True + usage = """ + %prog dir + %prog info + %prog list [] [--format=[human, abspath]] + %prog remove + %prog purge + """ + + def add_options(self) -> None: + + self.cmd_opts.add_option( + "--format", + action="store", + dest="list_format", + default="human", + choices=("human", "abspath"), + help="Select the output format among: human (default) or abspath", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + handlers = { + "dir": self.get_cache_dir, + "info": self.get_cache_info, + "list": self.list_cache_items, + "remove": self.remove_cache_items, + "purge": self.purge_cache, + } + + if not options.cache_dir: + logger.error("pip cache commands can not function since cache is disabled.") + return ERROR + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers. 
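# (Illustrative note, not from pip's source: the handler table above is reached by
#  invocations such as `pip cache dir`, `pip cache list "requests*" --format=abspath`,
#  or `pip cache remove requests`; a missing or unrecognised action falls through to
#  the "Need an action" error, and any PipError raised by a handler is reported by
#  the error handling that follows.)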
+ try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def get_cache_dir(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + logger.info(options.cache_dir) + + def get_cache_info(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + num_http_files = len(self._find_http_files(options)) + num_packages = len(self._find_wheels(options, "*")) + + http_cache_location = self._cache_dir(options, "http") + wheels_cache_location = self._cache_dir(options, "wheels") + http_cache_size = filesystem.format_directory_size(http_cache_location) + wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) + + message = ( + textwrap.dedent( + """ + Package index page cache location: {http_cache_location} + Package index page cache size: {http_cache_size} + Number of HTTP files: {num_http_files} + Wheels location: {wheels_cache_location} + Wheels size: {wheels_cache_size} + Number of wheels: {package_count} + """ + ) + .format( + http_cache_location=http_cache_location, + http_cache_size=http_cache_size, + num_http_files=num_http_files, + wheels_cache_location=wheels_cache_location, + package_count=num_packages, + wheels_cache_size=wheels_cache_size, + ) + .strip() + ) + + logger.info(message) + + def list_cache_items(self, options: Values, args: List[Any]) -> None: + if len(args) > 1: + raise CommandError("Too many arguments") + + if args: + pattern = args[0] + else: + pattern = "*" + + files = self._find_wheels(options, pattern) + if options.list_format == "human": + self.format_for_human(files) + else: + self.format_for_abspath(files) + + def format_for_human(self, files: List[str]) -> None: + if not files: + logger.info("Nothing cached.") + return + + results = [] + for filename in files: + wheel = os.path.basename(filename) + size = filesystem.format_file_size(filename) + results.append(f" - {wheel} ({size})") + logger.info("Cache contents:\n") + logger.info("\n".join(sorted(results))) + + def format_for_abspath(self, files: List[str]) -> None: + if not files: + return + + results = [] + for filename in files: + results.append(filename) + + logger.info("\n".join(sorted(results))) + + def remove_cache_items(self, options: Values, args: List[Any]) -> None: + if len(args) > 1: + raise CommandError("Too many arguments") + + if not args: + raise CommandError("Please provide a pattern") + + files = self._find_wheels(options, args[0]) + + no_matching_msg = "No matching packages" + if args[0] == "*": + # Only fetch http files if no specific pattern given + files += self._find_http_files(options) + else: + # Add the pattern to the log message + no_matching_msg += ' for pattern "{}"'.format(args[0]) + + if not files: + logger.warning(no_matching_msg) + + for filename in files: + os.unlink(filename) + logger.verbose("Removed %s", filename) + logger.info("Files removed: %s", len(files)) + + def purge_cache(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + return self.remove_cache_items(options, ["*"]) + + def _cache_dir(self, options: Values, subdir: str) -> str: + return os.path.join(options.cache_dir, subdir) + + def _find_http_files(self, options: Values) -> List[str]: + http_dir = self._cache_dir(options, "http") + return filesystem.find_files(http_dir, "*") + + def _find_wheels(self, options: Values, pattern: str) -> List[str]: + wheel_dir = 
self._cache_dir(options, "wheels") + + # The wheel filename format, as specified in PEP 427, is: + # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl + # + # Additionally, non-alphanumeric values in the distribution are + # normalized to underscores (_), meaning hyphens can never occur + # before `-{version}`. + # + # Given that information: + # - If the pattern we're given contains a hyphen (-), the user is + # providing at least the version. Thus, we can just append `*.whl` + # to match the rest of it. + # - If the pattern we're given doesn't contain a hyphen (-), the + # user is only providing the name. Thus, we append `-*.whl` to + # match the hyphen before the version, followed by anything else. + # + # PEP 427: https://www.python.org/dev/peps/pep-0427/ + pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl") + + return filesystem.find_files(wheel_dir, pattern) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/check.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/check.py new file mode 100644 index 0000000..3864220 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/check.py @@ -0,0 +1,53 @@ +import logging +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.operations.check import ( + check_package_set, + create_package_set_from_installed, +) +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class CheckCommand(Command): + """Verify installed packages have compatible dependencies.""" + + usage = """ + %prog [options]""" + + def run(self, options: Values, args: List[str]) -> int: + + package_set, parsing_probs = create_package_set_from_installed() + missing, conflicting = check_package_set(package_set) + + for project_name in missing: + version = package_set[project_name].version + for dependency in missing[project_name]: + write_output( + "%s %s requires %s, which is not installed.", + project_name, + version, + dependency[0], + ) + + for project_name in conflicting: + version = package_set[project_name].version + for dep_name, dep_version, req in conflicting[project_name]: + write_output( + "%s %s has requirement %s, but you have %s %s.", + project_name, + version, + req, + dep_name, + dep_version, + ) + + if missing or conflicting or parsing_probs: + return ERROR + else: + write_output("No broken requirements found.") + return SUCCESS diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/completion.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/completion.py new file mode 100644 index 0000000..c0fb4ca --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/completion.py @@ -0,0 +1,96 @@ +import sys +import textwrap +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.utils.misc import get_prog + +BASE_COMPLETION = """ +# pip {shell} completion start{script}# pip {shell} completion end +""" + +COMPLETION_SCRIPTS = { + "bash": """ + _pip_completion() + {{ + COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) + }} + complete -o default -F _pip_completion {prog} + """, + "zsh": """ + function _pip_completion {{ + local words cword + read -Ac words + read -cn cword + reply=( $( 
COMP_WORDS="$words[*]" \\ + COMP_CWORD=$(( cword-1 )) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) + }} + compctl -K _pip_completion {prog} + """, + "fish": """ + function __fish_complete_pip + set -lx COMP_WORDS (commandline -o) "" + set -lx COMP_CWORD ( \\ + math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ + ) + set -lx PIP_AUTO_COMPLETE 1 + string split \\ -- (eval $COMP_WORDS[1]) + end + complete -fa "(__fish_complete_pip)" -c {prog} + """, +} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--bash", + "-b", + action="store_const", + const="bash", + dest="shell", + help="Emit completion code for bash", + ) + self.cmd_opts.add_option( + "--zsh", + "-z", + action="store_const", + const="zsh", + dest="shell", + help="Emit completion code for zsh", + ) + self.cmd_opts.add_option( + "--fish", + "-f", + action="store_const", + const="fish", + dest="shell", + help="Emit completion code for fish", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ["--" + shell for shell in sorted(shells)] + if options.shell in shells: + script = textwrap.dedent( + COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog()) + ) + print(BASE_COMPLETION.format(script=script, shell=options.shell)) + return SUCCESS + else: + sys.stderr.write( + "ERROR: You must pass {}\n".format(" or ".join(shell_options)) + ) + return SUCCESS diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/configuration.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/configuration.py new file mode 100644 index 0000000..c6c74ed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/configuration.py @@ -0,0 +1,266 @@ +import logging +import os +import subprocess +from optparse import Values +from typing import Any, List, Optional + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.configuration import ( + Configuration, + Kind, + get_configuration_files, + kinds, +) +from pip._internal.exceptions import PipError +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_prog, write_output + +logger = logging.getLogger(__name__) + + +class ConfigurationCommand(Command): + """ + Manage local and global configuration. + + Subcommands: + + - list: List the active configuration (or from the file specified) + - edit: Edit the configuration file in an editor + - get: Get the value associated with name + - set: Set the name=value + - unset: Unset the value associated with name + - debug: List the configuration files and values defined under them + + If none of --user, --global and --site are passed, a virtual + environment configuration file is used if one is active and the file + exists. Otherwise, all modifications happen to the user file by + default. + """ + + ignore_require_venv = True + usage = """ + %prog [] list + %prog [] [--editor ] edit + + %prog [] get name + %prog [] set name value + %prog [] unset name + %prog [] debug + """ + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--editor", + dest="editor", + action="store", + default=None, + help=( + "Editor to use to edit the file. 
Uses VISUAL or EDITOR " + "environment variables if not provided." + ), + ) + + self.cmd_opts.add_option( + "--global", + dest="global_file", + action="store_true", + default=False, + help="Use the system-wide configuration file only", + ) + + self.cmd_opts.add_option( + "--user", + dest="user_file", + action="store_true", + default=False, + help="Use the user configuration file only", + ) + + self.cmd_opts.add_option( + "--site", + dest="site_file", + action="store_true", + default=False, + help="Use the current environment configuration file only", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + handlers = { + "list": self.list_values, + "edit": self.open_in_editor, + "get": self.get_name, + "set": self.set_name_value, + "unset": self.unset_name, + "debug": self.list_config_values, + } + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Determine which configuration files are to be loaded + # Depends on whether the command is modifying. + try: + load_only = self._determine_file( + options, need_value=(action in ["get", "set", "unset", "edit"]) + ) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + # Load a new configuration + self.configuration = Configuration( + isolated=options.isolated_mode, load_only=load_only + ) + self.configuration.load() + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]: + file_options = [ + key + for key, value in ( + (kinds.USER, options.user_file), + (kinds.GLOBAL, options.global_file), + (kinds.SITE, options.site_file), + ) + if value + ] + + if not file_options: + if not need_value: + return None + # Default to user, unless there's a site file. + elif any( + os.path.exists(site_config_file) + for site_config_file in get_configuration_files()[kinds.SITE] + ): + return kinds.SITE + else: + return kinds.USER + elif len(file_options) == 1: + return file_options[0] + + raise PipError( + "Need exactly one file to operate upon " + "(--user, --site, --global) to perform." 
+ ) + + def list_values(self, options: Values, args: List[str]) -> None: + self._get_n_args(args, "list", n=0) + + for key, value in sorted(self.configuration.items()): + write_output("%s=%r", key, value) + + def get_name(self, options: Values, args: List[str]) -> None: + key = self._get_n_args(args, "get [name]", n=1) + value = self.configuration.get_value(key) + + write_output("%s", value) + + def set_name_value(self, options: Values, args: List[str]) -> None: + key, value = self._get_n_args(args, "set [name] [value]", n=2) + self.configuration.set_value(key, value) + + self._save_configuration() + + def unset_name(self, options: Values, args: List[str]) -> None: + key = self._get_n_args(args, "unset [name]", n=1) + self.configuration.unset_value(key) + + self._save_configuration() + + def list_config_values(self, options: Values, args: List[str]) -> None: + """List config key-value pairs across different config files""" + self._get_n_args(args, "debug", n=0) + + self.print_env_var_values() + # Iterate over config files and print if they exist, and the + # key-value pairs present in them if they do + for variant, files in sorted(self.configuration.iter_config_files()): + write_output("%s:", variant) + for fname in files: + with indent_log(): + file_exists = os.path.exists(fname) + write_output("%s, exists: %r", fname, file_exists) + if file_exists: + self.print_config_file_values(variant) + + def print_config_file_values(self, variant: Kind) -> None: + """Get key-value pairs from the file of a variant""" + for name, value in self.configuration.get_values_in_config(variant).items(): + with indent_log(): + write_output("%s: %s", name, value) + + def print_env_var_values(self) -> None: + """Get key-values pairs present as environment variables""" + write_output("%s:", "env_var") + with indent_log(): + for key, value in sorted(self.configuration.get_environ_vars()): + env_var = f"PIP_{key.upper()}" + write_output("%s=%r", env_var, value) + + def open_in_editor(self, options: Values, args: List[str]) -> None: + editor = self._determine_editor(options) + + fname = self.configuration.get_file_to_edit() + if fname is None: + raise PipError("Could not determine appropriate file.") + + try: + subprocess.check_call([editor, fname]) + except subprocess.CalledProcessError as e: + raise PipError( + "Editor Subprocess exited with exit code {}".format(e.returncode) + ) + + def _get_n_args(self, args: List[str], example: str, n: int) -> Any: + """Helper to make sure the command got the right number of arguments""" + if len(args) != n: + msg = ( + "Got unexpected number of arguments, expected {}. " + '(example: "{} config {}")' + ).format(n, get_prog(), example) + raise PipError(msg) + + if n == 1: + return args[0] + else: + return args + + def _save_configuration(self) -> None: + # We successfully ran a modifying command. Need to save the + # configuration. + try: + self.configuration.save() + except Exception: + logger.exception( + "Unable to save configuration. Please report this as a bug." 
+ ) + raise PipError("Internal Error.") + + def _determine_editor(self, options: Values) -> str: + if options.editor is not None: + return options.editor + elif "VISUAL" in os.environ: + return os.environ["VISUAL"] + elif "EDITOR" in os.environ: + return os.environ["EDITOR"] + else: + raise PipError("Could not determine editor to use.") diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/debug.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/debug.py new file mode 100644 index 0000000..d3f1f28 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/debug.py @@ -0,0 +1,202 @@ +import locale +import logging +import os +import sys +from optparse import Values +from types import ModuleType +from typing import Any, Dict, List, Optional + +import pip._vendor +from pip._vendor.certifi import where +from pip._vendor.packaging.version import parse as parse_version + +from pip import __file__ as pip_location +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.configuration import Configuration +from pip._internal.metadata import get_environment +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_pip_version + +logger = logging.getLogger(__name__) + + +def show_value(name: str, value: Any) -> None: + logger.info("%s: %s", name, value) + + +def show_sys_implementation() -> None: + logger.info("sys.implementation:") + implementation_name = sys.implementation.name + with indent_log(): + show_value("name", implementation_name) + + +def create_vendor_txt_map() -> Dict[str, str]: + vendor_txt_path = os.path.join( + os.path.dirname(pip_location), "_vendor", "vendor.txt" + ) + + with open(vendor_txt_path) as f: + # Purge non version specifying lines. + # Also, remove any space prefix or suffixes (including comments). + lines = [ + line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line + ] + + # Transform into "module" -> version dict. + return dict(line.split("==", 1) for line in lines) # type: ignore + + +def get_module_from_module_name(module_name: str) -> ModuleType: + # Module name can be uppercase in vendor.txt for some reason... + module_name = module_name.lower() + # PATCH: setuptools is actually only pkg_resources. + if module_name == "setuptools": + module_name = "pkg_resources" + + __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) + return getattr(pip._vendor, module_name) + + +def get_vendor_version_from_module(module_name: str) -> Optional[str]: + module = get_module_from_module_name(module_name) + version = getattr(module, "__version__", None) + + if not version: + # Try to find version in debundled module info. + env = get_environment([os.path.dirname(module.__file__)]) + dist = env.get_distribution(module_name) + if dist: + version = str(dist.version) + + return version + + +def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None: + """Log the actual version and print extra info if there is + a conflict or if the actual version could not be imported. 
+ """ + for module_name, expected_version in vendor_txt_versions.items(): + extra_message = "" + actual_version = get_vendor_version_from_module(module_name) + if not actual_version: + extra_message = ( + " (Unable to locate actual module version, using" + " vendor.txt specified version)" + ) + actual_version = expected_version + elif parse_version(actual_version) != parse_version(expected_version): + extra_message = ( + " (CONFLICT: vendor.txt suggests version should" + " be {})".format(expected_version) + ) + logger.info("%s==%s%s", module_name, actual_version, extra_message) + + +def show_vendor_versions() -> None: + logger.info("vendored library versions:") + + vendor_txt_versions = create_vendor_txt_map() + with indent_log(): + show_actual_vendor_versions(vendor_txt_versions) + + +def show_tags(options: Values) -> None: + tag_limit = 10 + + target_python = make_target_python(options) + tags = target_python.get_tags() + + # Display the target options that were explicitly provided. + formatted_target = target_python.format_given() + suffix = "" + if formatted_target: + suffix = f" (target: {formatted_target})" + + msg = "Compatible tags: {}{}".format(len(tags), suffix) + logger.info(msg) + + if options.verbose < 1 and len(tags) > tag_limit: + tags_limited = True + tags = tags[:tag_limit] + else: + tags_limited = False + + with indent_log(): + for tag in tags: + logger.info(str(tag)) + + if tags_limited: + msg = ( + "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]" + ).format(tag_limit=tag_limit) + logger.info(msg) + + +def ca_bundle_info(config: Configuration) -> str: + levels = set() + for key, _ in config.items(): + levels.add(key.split(".")[0]) + + if not levels: + return "Not specified" + + levels_that_override_global = ["install", "wheel", "download"] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return "global" + + if "global" in levels: + levels.remove("global") + return ", ".join(levels) + + +class DebugCommand(Command): + """ + Display debug information. + """ + + usage = """ + %prog """ + ignore_require_venv = True + + def add_options(self) -> None: + cmdoptions.add_target_python_options(self.cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + self.parser.config.load() + + def run(self, options: Values, args: List[str]) -> int: + logger.warning( + "This command is only meant for debugging. " + "Do not use this with automation for parsing and getting these " + "details, since the output and options of this command may " + "change without notice." 
+ ) + show_value("pip version", get_pip_version()) + show_value("sys.version", sys.version) + show_value("sys.executable", sys.executable) + show_value("sys.getdefaultencoding", sys.getdefaultencoding()) + show_value("sys.getfilesystemencoding", sys.getfilesystemencoding()) + show_value( + "locale.getpreferredencoding", + locale.getpreferredencoding(), + ) + show_value("sys.platform", sys.platform) + show_sys_implementation() + + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE")) + show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE")) + show_value("pip._vendor.certifi.where()", where()) + show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED) + + show_vendor_versions() + + show_tags(options) + + return SUCCESS diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/download.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/download.py new file mode 100644 index 0000000..7de207f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/download.py @@ -0,0 +1,139 @@ +import logging +import os +from optparse import Values +from typing import List + +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path, write_output +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class DownloadCommand(RequirementCommand): + """ + Download packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports downloading from "requirements files", which provide + an easy way to specify a whole environment to be downloaded. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] ... + %prog [options] ... 
+ %prog [options] ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + + self.cmd_opts.add_option( + "-d", + "--dest", + "--destination-dir", + "--destination-directory", + dest="download_dir", + metavar="dir", + default=os.curdir, + help="Download packages into .", + ) + + cmdoptions.add_target_python_options(self.cmd_opts) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: List[str]) -> int: + + options.ignore_installed = True + # editable doesn't really make sense for `pip download`, but the bowels + # of the RequirementSet code require that property. + options.editables = [] + + cmdoptions.check_dist_restriction(options) + + options.download_dir = normalize_path(options.download_dir) + ensure_dir(options.download_dir) + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="download", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.download_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + ignore_requires_python=options.ignore_requires_python, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + + downloaded: List[str] = [] + for req in requirement_set.requirements.values(): + if req.satisfied_by is None: + assert req.name is not None + preparer.save_linked_requirement(req) + downloaded.append(req.name) + if downloaded: + write_output("Successfully downloaded %s", " ".join(downloaded)) + + return SUCCESS diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/freeze.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/freeze.py new file mode 100644 index 0000000..5fa6d39 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/freeze.py @@ -0,0 +1,97 @@ +import sys +from optparse import Values +from typing import List + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from 
pip._internal.cli.status_codes import SUCCESS +from pip._internal.operations.freeze import freeze +from pip._internal.utils.compat import stdlib_pkgs + +DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"} + + +class FreezeCommand(Command): + """ + Output installed packages in requirements format. + + packages are listed in a case-insensitive sorted order. + """ + + usage = """ + %prog [options]""" + log_streams = ("ext://sys.stderr", "ext://sys.stderr") + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help=( + "Use the order in the given requirements file and its " + "comments when generating output. This option can be " + "used multiple times." + ), + ) + self.cmd_opts.add_option( + "-l", + "--local", + dest="local", + action="store_true", + default=False, + help=( + "If in a virtualenv that has global access, do not output " + "globally-installed packages." + ), + ) + self.cmd_opts.add_option( + "--user", + dest="user", + action="store_true", + default=False, + help="Only output packages installed in user-site.", + ) + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( + "--all", + dest="freeze_all", + action="store_true", + help=( + "Do not skip these packages in the output:" + " {}".format(", ".join(DEV_PKGS)) + ), + ) + self.cmd_opts.add_option( + "--exclude-editable", + dest="exclude_editable", + action="store_true", + help="Exclude editable package from output.", + ) + self.cmd_opts.add_option(cmdoptions.list_exclude()) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + skip = set(stdlib_pkgs) + if not options.freeze_all: + skip.update(DEV_PKGS) + + if options.excludes: + skip.update(options.excludes) + + cmdoptions.check_list_path_option(options) + + for line in freeze( + requirement=options.requirements, + local_only=options.local, + user_only=options.user, + paths=options.path, + isolated=options.isolated_mode, + skip=skip, + exclude_editable=options.exclude_editable, + ): + sys.stdout.write(line + "\n") + return SUCCESS diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/hash.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/hash.py new file mode 100644 index 0000000..042dac8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/hash.py @@ -0,0 +1,59 @@ +import hashlib +import logging +import sys +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES +from pip._internal.utils.misc import read_chunks, write_output + +logger = logging.getLogger(__name__) + + +class HashCommand(Command): + """ + Compute a hash of a local package archive. + + These can be used with --hash in a requirements file to do repeatable + installs. + """ + + usage = "%prog [options] ..." 
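# (Illustrative note, not from pip's source; the wheel path below is hypothetical:
#  a typical invocation is `pip hash -a sha256 dist/example-1.0-py3-none-any.whl`,
#  which prints the path followed by a `--hash=sha256:<digest>` line suitable for
#  pasting into a requirements file.)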
+ ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-a", + "--algorithm", + dest="algorithm", + choices=STRONG_HASHES, + action="store", + default=FAVORITE_HASH, + help="The hash algorithm to use: one of {}".format( + ", ".join(STRONG_HASHES) + ), + ) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + write_output( + "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm) + ) + return SUCCESS + + +def _hash_of_file(path: str, algorithm: str) -> str: + """Return the hash digest of a file.""" + with open(path, "rb") as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/help.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/help.py new file mode 100644 index 0000000..6206631 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/help.py @@ -0,0 +1,41 @@ +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + + usage = """ + %prog """ + ignore_require_venv = True + + def run(self, options: Values, args: List[str]) -> int: + from pip._internal.commands import ( + commands_dict, + create_command, + get_similar_commands, + ) + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + command = create_command(cmd_name) + command.parser.print_help() + + return SUCCESS diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/index.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/index.py new file mode 100644 index 0000000..b4bf0ac --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/index.py @@ -0,0 +1,138 @@ +import logging +from optparse import Values +from typing import Any, Iterable, List, Optional, Union + +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.commands.search import print_dist_installation_info +from pip._internal.exceptions import CommandError, DistributionNotFound, PipError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class IndexCommand(IndexGroupCommand): + """ + Inspect information available from package indexes. 
+ """ + + usage = """ + %prog versions + """ + + def add_options(self) -> None: + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + handlers = { + "versions": self.get_available_package_versions, + } + + logger.warning( + "pip index is currently an experimental command. " + "It may be removed/changed in a future release " + "without prior warning." + ) + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _build_package_finder( + self, + options: Values, + session: PipSession, + target_python: Optional[TargetPython] = None, + ignore_requires_python: Optional[bool] = None, + ) -> PackageFinder: + """ + Create a package finder appropriate to the index command. + """ + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=options.pre, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) + + def get_available_package_versions(self, options: Values, args: List[Any]) -> None: + if len(args) != 1: + raise CommandError("You need to specify exactly one argument") + + target_python = cmdoptions.make_target_python(options) + query = args[0] + + with self._build_session(options) as session: + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + versions: Iterable[Union[LegacyVersion, Version]] = ( + candidate.version for candidate in finder.find_all_candidates(query) + ) + + if not options.pre: + # Remove prereleases + versions = ( + version for version in versions if not version.is_prerelease + ) + versions = set(versions) + + if not versions: + raise DistributionNotFound( + "No matching distribution found for {}".format(query) + ) + + formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)] + latest = formatted_versions[0] + + write_output("{} ({})".format(query, latest)) + write_output("Available versions: {}".format(", ".join(formatted_versions))) + print_dist_installation_info(query, latest) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/install.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/install.py new file mode 100644 index 0000000..eedb1ff --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/install.py @@ -0,0 +1,770 @@ +import errno +import operator +import os +import shutil +import site +from optparse import SUPPRESS_HELP, Values +from typing import Iterable, List, Optional + +from pip._vendor.packaging.utils import 
canonicalize_name + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import ( + RequirementCommand, + warn_if_run_as_root, + with_cleanup, +) +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, InstallationError +from pip._internal.locations import get_scheme +from pip._internal.metadata import get_environment +from pip._internal.models.format_control import FormatControl +from pip._internal.operations.check import ConflictDetails, check_install_conflicts +from pip._internal.req import install_given_reqs +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.distutils_args import parse_distutils_args +from pip._internal.utils.filesystem import test_writable_dir +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + ensure_dir, + get_pip_version, + protect_pip_from_modification_on_windows, + write_output, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) +from pip._internal.wheel_builder import ( + BinaryAllowedPredicate, + build, + should_build_for_install_command, +) + +logger = getLogger(__name__) + + +def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate: + def check_binary_allowed(req: InstallRequirement) -> bool: + canonical_name = canonicalize_name(req.name or "") + allowed_formats = format_control.get_allowed_formats(canonical_name) + return "binary" in allowed_formats + + return check_binary_allowed + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) + + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option( + "-t", + "--target", + dest="target_dir", + metavar="dir", + default=None, + help=( + "Install packages into . " + "By default this will not replace existing files/folders in " + ". Use --upgrade to replace existing packages in " + "with new versions." + ), + ) + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option( + "--user", + dest="use_user_site", + action="store_true", + help=( + "Install to the Python user install directory for your " + "platform. Typically ~/.local/, or %APPDATA%\\Python on " + "Windows. 
(See the Python documentation for site.USER_BASE " + "for full details.)" + ), + ) + self.cmd_opts.add_option( + "--no-user", + dest="use_user_site", + action="store_false", + help=SUPPRESS_HELP, + ) + self.cmd_opts.add_option( + "--root", + dest="root_path", + metavar="dir", + default=None, + help="Install everything relative to this alternate root directory.", + ) + self.cmd_opts.add_option( + "--prefix", + dest="prefix_path", + metavar="dir", + default=None, + help=( + "Installation prefix where lib, bin and other top-level " + "folders are placed" + ), + ) + + self.cmd_opts.add_option(cmdoptions.src()) + + self.cmd_opts.add_option( + "-U", + "--upgrade", + dest="upgrade", + action="store_true", + help=( + "Upgrade all specified packages to the newest available " + "version. The handling of dependencies depends on the " + "upgrade-strategy used." + ), + ) + + self.cmd_opts.add_option( + "--upgrade-strategy", + dest="upgrade_strategy", + default="only-if-needed", + choices=["only-if-needed", "eager"], + help=( + "Determines how dependency upgrading should be handled " + "[default: %default]. " + '"eager" - dependencies are upgraded regardless of ' + "whether the currently installed version satisfies the " + "requirements of the upgraded package(s). " + '"only-if-needed" - are upgraded only when they do not ' + "satisfy the requirements of the upgraded package(s)." + ), + ) + + self.cmd_opts.add_option( + "--force-reinstall", + dest="force_reinstall", + action="store_true", + help="Reinstall all packages even if they are already up-to-date.", + ) + + self.cmd_opts.add_option( + "-I", + "--ignore-installed", + dest="ignore_installed", + action="store_true", + help=( + "Ignore the installed packages, overwriting them. " + "This can break your system if the existing package " + "is of a different version or was installed " + "with a different package manager!" 
+ ), + ) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + + self.cmd_opts.add_option(cmdoptions.install_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) + + self.cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-warn-script-location", + action="store_false", + dest="warn_script_location", + default=True, + help="Do not warn when installing scripts outside PATH", + ) + self.cmd_opts.add_option( + "--no-warn-conflicts", + action="store_false", + dest="warn_about_conflicts", + default=True, + help="Do not warn about broken dependencies", + ) + + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: List[str]) -> int: + if options.use_user_site and options.target_dir is not None: + raise CommandError("Can not combine '--user' and '--target'") + + cmdoptions.check_install_build_global(options) + upgrade_strategy = "to-satisfy-only" + if options.upgrade: + upgrade_strategy = options.upgrade_strategy + + cmdoptions.check_dist_restriction(options, check_target=True) + + install_options = options.install_options or [] + + logger.verbose("Using %s", get_pip_version()) + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) + + target_temp_dir: Optional[TempDirectory] = None + target_temp_dir_path: Optional[str] = None + if options.target_dir: + options.ignore_installed = True + options.target_dir = os.path.abspath(options.target_dir) + if ( + # fmt: off + os.path.exists(options.target_dir) and + not os.path.isdir(options.target_dir) + # fmt: on + ): + raise CommandError( + "Target path exists but is not a directory, will not continue." 
+ ) + + # Create a target directory for using with the target option + target_temp_dir = TempDirectory(kind="target") + target_temp_dir_path = target_temp_dir.path + self.enter_context(target_temp_dir) + + global_options = options.global_options or [] + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="install", + globally_managed=True, + ) + + try: + reqs = self.get_requirements(args, options, finder, session) + + # Only when installing is it permitted to use PEP 660. + # In other circumstances (pip wheel, pip download) we generate + # regular (i.e. non editable) metadata and wheels. + for req in reqs: + req.permit_editable_wheels = True + + reject_location_related_install_options(reqs, options.install_options) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + use_user_site=options.use_user_site, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=not options.target_dir + ) + + try: + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = False + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) + + check_binary_allowed = get_check_binary_allowed(finder.format_control) + + reqs_to_build = [ + r + for r in requirement_set.requirements.values() + if should_build_for_install_command(r, check_binary_allowed) + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=True, + build_options=[], + global_options=[], + ) + + # If we're using PEP 517, we cannot do a legacy setup.py install + # so we fail here. + pep517_build_failure_names: List[str] = [ + r.name for r in build_failures if r.use_pep517 # type: ignore + ] + if pep517_build_failure_names: + raise InstallationError( + "Could not build wheels for {}, which is required to " + "install pyproject.toml-based projects".format( + ", ".join(pep517_build_failure_names) + ) + ) + + # For now, we just warn about failures building legacy + # requirements, as we'll fall through to a setup.py install for + # those. + for r in build_failures: + if not r.use_pep517: + r.legacy_install_reason = 8368 + + to_install = resolver.get_installation_order(requirement_set) + + # Check for conflicts in the package set we're installing. 
+ conflicts: Optional[ConflictDetails] = None + should_warn_about_conflicts = ( + not options.ignore_dependencies and options.warn_about_conflicts + ) + if should_warn_about_conflicts: + conflicts = self._determine_conflicts(to_install) + + # Don't warn about script install locations if + # --target or --prefix has been specified + warn_script_location = options.warn_script_location + if options.target_dir or options.prefix_path: + warn_script_location = False + + installed = install_given_reqs( + to_install, + install_options, + global_options, + root=options.root_path, + home=target_temp_dir_path, + prefix=options.prefix_path, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + pycompile=options.compile, + ) + + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir_path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + env = get_environment(lib_locations) + + installed.sort(key=operator.attrgetter("name")) + items = [] + for result in installed: + item = result.name + try: + installed_dist = env.get_distribution(item) + if installed_dist is not None: + item = f"{item}-{installed_dist.version}" + except Exception: + pass + items.append(item) + + if conflicts is not None: + self._warn_about_conflicts( + conflicts, + resolver_variant=self.determine_resolver_variant(options), + ) + + installed_desc = " ".join(items) + if installed_desc: + write_output( + "Successfully installed %s", + installed_desc, + ) + except OSError as error: + show_traceback = self.verbosity >= 1 + + message = create_os_error_message( + error, + show_traceback, + options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) # noqa + + return ERROR + + if options.target_dir: + assert target_temp_dir + self._handle_target_dir( + options.target_dir, target_temp_dir, options.upgrade + ) + + warn_if_run_as_root() + return SUCCESS + + def _handle_target_dir( + self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool + ) -> None: + ensure_dir(target_dir) + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + lib_dir_list = [] + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + scheme = get_scheme("", home=target_temp_dir.path) + purelib_dir = scheme.purelib + platlib_dir = scheme.platlib + data_dir = scheme.data + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + "Target directory %s already exists. Specify " + "--upgrade to force replacement.", + target_item_dir, + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + "Target directory %s already exists and is " + "a link. 
pip will not automatically replace " + "links, please remove if replacement is " + "desired.", + target_item_dir, + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move(os.path.join(lib_dir, item), target_item_dir) + + def _determine_conflicts( + self, to_install: List[InstallRequirement] + ) -> Optional[ConflictDetails]: + try: + return check_install_conflicts(to_install) + except Exception: + logger.exception( + "Error while checking for conflicts. Please file an issue on " + "pip's issue tracker: https://github.com/pypa/pip/issues/new" + ) + return None + + def _warn_about_conflicts( + self, conflict_details: ConflictDetails, resolver_variant: str + ) -> None: + package_set, (missing, conflicting) = conflict_details + if not missing and not conflicting: + return + + parts: List[str] = [] + if resolver_variant == "legacy": + parts.append( + "pip's legacy dependency resolver does not consider dependency " + "conflicts when selecting packages. This behaviour is the " + "source of the following dependency conflicts." + ) + else: + assert resolver_variant == "2020-resolver" + parts.append( + "pip's dependency resolver does not currently take into account " + "all the packages that are installed. This behaviour is the " + "source of the following dependency conflicts." + ) + + # NOTE: There is some duplication here, with commands/check.py + for project_name in missing: + version = package_set[project_name][0] + for dependency in missing[project_name]: + message = ( + "{name} {version} requires {requirement}, " + "which is not installed." + ).format( + name=project_name, + version=version, + requirement=dependency[1], + ) + parts.append(message) + + for project_name in conflicting: + version = package_set[project_name][0] + for dep_name, dep_version, req in conflicting[project_name]: + message = ( + "{name} {version} requires {requirement}, but {you} have " + "{dep_name} {dep_version} which is incompatible." + ).format( + name=project_name, + version=version, + requirement=req, + dep_name=dep_name, + dep_version=dep_version, + you=("you" if resolver_variant == "2020-resolver" else "you'll"), + ) + parts.append(message) + + logger.critical("\n".join(parts)) + + +def get_lib_location_guesses( + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> List[str]: + scheme = get_scheme( + "", + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + return [scheme.purelib, scheme.platlib] + + +def site_packages_writable(root: Optional[str], isolated: bool) -> bool: + return all( + test_writable_dir(d) + for d in set(get_lib_location_guesses(root=root, isolated=isolated)) + ) + + +def decide_user_install( + use_user_site: Optional[bool], + prefix_path: Optional[str] = None, + target_dir: Optional[str] = None, + root_path: Optional[str] = None, + isolated_mode: bool = False, +) -> bool: + """Determine whether to do a user install based on the input options. + + If use_user_site is False, no additional checks are done. + If use_user_site is True, it is checked for compatibility with other + options. + If use_user_site is None, the default behaviour depends on the environment, + which is provided by the other arguments. + """ + # In some cases (config from tox), use_user_site can be set to an integer + # rather than a bool, which 'use_user_site is False' wouldn't catch. 
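# (Illustrative note, not from pip's source: tox can pass use_user_site as 0/1
#  rather than a bool, and a plain `use_user_site is False` test would not catch 0;
#  hence the truthiness-based `(use_user_site is not None) and (not use_user_site)`
#  check that follows.)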
+ if (use_user_site is not None) and (not use_user_site): + logger.debug("Non-user install by explicit request") + return False + + if use_user_site: + if prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + logger.debug("User install by explicit request") + return True + + # If we are here, user installs have not been explicitly requested/avoided + assert use_user_site is None + + # user install incompatible with --prefix/--target + if prefix_path or target_dir: + logger.debug("Non-user install due to --prefix or --target option") + return False + + # If user installs are not enabled, choose a non-user install + if not site.ENABLE_USER_SITE: + logger.debug("Non-user install because user site-packages disabled") + return False + + # If we have permission for a non-user install, do that, + # otherwise do a user install. + if site_packages_writable(root=root_path, isolated=isolated_mode): + logger.debug("Non-user install because site-packages writeable") + return False + + logger.info( + "Defaulting to user installation because normal site-packages " + "is not writeable" + ) + return True + + +def reject_location_related_install_options( + requirements: List[InstallRequirement], options: Optional[List[str]] +) -> None: + """If any location-changing --install-option arguments were passed for + requirements or on the command-line, then show a deprecation warning. + """ + + def format_options(option_names: Iterable[str]) -> List[str]: + return ["--{}".format(name.replace("_", "-")) for name in option_names] + + offenders = [] + + for requirement in requirements: + install_options = requirement.install_options + location_options = parse_distutils_args(install_options) + if location_options: + offenders.append( + "{!r} from {}".format( + format_options(location_options.keys()), requirement + ) + ) + + if options: + location_options = parse_distutils_args(options) + if location_options: + offenders.append( + "{!r} from command line".format(format_options(location_options.keys())) + ) + + if not offenders: + return + + raise CommandError( + "Location-changing options found in --install-option: {}." + " This is unsupported, use pip-level options like --user," + " --prefix, --root, and --target instead.".format("; ".join(offenders)) + ) + + +def create_os_error_message( + error: OSError, show_traceback: bool, using_user_site: bool +) -> str: + """Format an error message for an OSError + + It may occur anytime during the execution of the install command. 
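+
+ Illustrative sketch (hypothetical call; assumes a permissions error outside a
+ virtualenv, so the wording follows the EACCES branch below):
+
+     create_os_error_message(OSError(errno.EACCES, "denied"), False, False)
+     # -> "Could not install packages due to an OSError: [Errno 13] denied\n"
+     #    "Consider using the `--user` option or check the permissions.\n"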
+ """ + parts = [] + + # Mention the error if we are not going to show a traceback + parts.append("Could not install packages due to an OSError") + if not show_traceback: + parts.append(": ") + parts.append(str(error)) + else: + parts.append(".") + + # Spilt the error indication from a helper message (if any) + parts[-1] += "\n" + + # Suggest useful actions to the user: + # (1) using user site-packages or (2) verifying the permissions + if error.errno == errno.EACCES: + user_option_part = "Consider using the `--user` option" + permissions_part = "Check the permissions" + + if not running_under_virtualenv() and not using_user_site: + parts.extend( + [ + user_option_part, + " or ", + permissions_part.lower(), + ] + ) + else: + parts.append(permissions_part) + parts.append(".\n") + + # Suggest the user to enable Long Paths if path length is + # more than 260 + if ( + WINDOWS + and error.errno == errno.ENOENT + and error.filename + and len(error.filename) > 260 + ): + parts.append( + "HINT: This error might have occurred since " + "this system does not have Windows Long Path " + "support enabled. You can find information on " + "how to enable this at " + "https://pip.pypa.io/warnings/enable-long-paths\n" + ) + + return "".join(parts).strip() + "\n" diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/list.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/list.py new file mode 100644 index 0000000..75d8dd4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/list.py @@ -0,0 +1,361 @@ +import json +import logging +from optparse import Values +from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Tuple, cast + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_environment +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.session import PipSession +from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.misc import tabulate, write_output +from pip._internal.utils.parallel import map_multithread + +if TYPE_CHECKING: + from pip._internal.metadata.base import DistributionVersion + + class _DistWithLatestInfo(BaseDistribution): + """Give the distribution object a couple of extra fields. + + These will be populated during ``get_outdated()``. This is dirty but + makes the rest of the code much cleaner. + """ + + latest_version: DistributionVersion + latest_filetype: str + + _ProcessedDists = Sequence[_DistWithLatestInfo] + + +logger = logging.getLogger(__name__) + + +class ListCommand(IndexGroupCommand): + """ + List installed packages, including editables. + + Packages are listed in a case-insensitive sorted order. 
+ """ + + ignore_require_venv = True + usage = """ + %prog [options]""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-o", + "--outdated", + action="store_true", + default=False, + help="List outdated packages", + ) + self.cmd_opts.add_option( + "-u", + "--uptodate", + action="store_true", + default=False, + help="List uptodate packages", + ) + self.cmd_opts.add_option( + "-e", + "--editable", + action="store_true", + default=False, + help="List editable projects.", + ) + self.cmd_opts.add_option( + "-l", + "--local", + action="store_true", + default=False, + help=( + "If in a virtualenv that has global access, do not list " + "globally-installed packages." + ), + ) + self.cmd_opts.add_option( + "--user", + dest="user", + action="store_true", + default=False, + help="Only output packages installed in user-site.", + ) + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( + "--pre", + action="store_true", + default=False, + help=( + "Include pre-release and development versions. By default, " + "pip only finds stable versions." + ), + ) + + self.cmd_opts.add_option( + "--format", + action="store", + dest="list_format", + default="columns", + choices=("columns", "freeze", "json"), + help="Select the output format among: columns (default), freeze, or json", + ) + + self.cmd_opts.add_option( + "--not-required", + action="store_true", + dest="not_required", + help="List packages that are not dependencies of installed packages.", + ) + + self.cmd_opts.add_option( + "--exclude-editable", + action="store_false", + dest="include_editable", + help="Exclude editable package from output.", + ) + self.cmd_opts.add_option( + "--include-editable", + action="store_true", + dest="include_editable", + help="Include editable package from output.", + default=True, + ) + self.cmd_opts.add_option(cmdoptions.list_exclude()) + index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + def _build_package_finder( + self, options: Values, session: PipSession + ) -> PackageFinder: + """ + Create a package finder appropriate to this list command. + """ + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=options.pre, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + ) + + def run(self, options: Values, args: List[str]) -> int: + if options.outdated and options.uptodate: + raise CommandError("Options --outdated and --uptodate cannot be combined.") + + cmdoptions.check_list_path_option(options) + + skip = set(stdlib_pkgs) + if options.excludes: + skip.update(canonicalize_name(n) for n in options.excludes) + + packages: "_ProcessedDists" = [ + cast("_DistWithLatestInfo", d) + for d in get_environment(options.path).iter_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + include_editables=options.include_editable, + skip=skip, + ) + ] + + # get_not_required must be called firstly in order to find and + # filter out all dependencies correctly. Otherwise a package + # can't be identified as requirement because some parent packages + # could be filtered out before. 
+ if options.not_required: + packages = self.get_not_required(packages, options) + + if options.outdated: + packages = self.get_outdated(packages, options) + elif options.uptodate: + packages = self.get_uptodate(packages, options) + + self.output_package_listing(packages, options) + return SUCCESS + + def get_outdated( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + return [ + dist + for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version > dist.version + ] + + def get_uptodate( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + return [ + dist + for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version == dist.version + ] + + def get_not_required( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + dep_keys = { + canonicalize_name(dep.name) + for dist in packages + for dep in (dist.iter_dependencies() or ()) + } + + # Create a set to remove duplicate packages, and cast it to a list + # to keep the return type consistent with get_outdated and + # get_uptodate + return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys}) + + def iter_packages_latest_infos( + self, packages: "_ProcessedDists", options: Values + ) -> Iterator["_DistWithLatestInfo"]: + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + + def latest_info( + dist: "_DistWithLatestInfo", + ) -> Optional["_DistWithLatestInfo"]: + all_candidates = finder.find_all_candidates(dist.canonical_name) + if not options.pre: + # Remove prereleases + all_candidates = [ + candidate + for candidate in all_candidates + if not candidate.version.is_prerelease + ] + + evaluator = finder.make_candidate_evaluator( + project_name=dist.canonical_name, + ) + best_candidate = evaluator.sort_best_candidate(all_candidates) + if best_candidate is None: + return None + + remote_version = best_candidate.version + if best_candidate.link.is_wheel: + typ = "wheel" + else: + typ = "sdist" + dist.latest_version = remote_version + dist.latest_filetype = typ + return dist + + for dist in map_multithread(latest_info, packages): + if dist is not None: + yield dist + + def output_package_listing( + self, packages: "_ProcessedDists", options: Values + ) -> None: + packages = sorted( + packages, + key=lambda dist: dist.canonical_name, + ) + if options.list_format == "columns" and packages: + data, header = format_for_columns(packages, options) + self.output_package_listing_columns(data, header) + elif options.list_format == "freeze": + for dist in packages: + if options.verbose >= 1: + write_output( + "%s==%s (%s)", dist.raw_name, dist.version, dist.location + ) + else: + write_output("%s==%s", dist.raw_name, dist.version) + elif options.list_format == "json": + write_output(format_for_json(packages, options)) + + def output_package_listing_columns( + self, data: List[List[str]], header: List[str] + ) -> None: + # insert the header first: we need to know the size of column names + if len(data) > 0: + data.insert(0, header) + + pkg_strings, sizes = tabulate(data) + + # Create and add a separator. + if len(data) > 0: + pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes))) + + for val in pkg_strings: + write_output(val) + + +def format_for_columns( + pkgs: "_ProcessedDists", options: Values +) -> Tuple[List[List[str]], List[str]]: + """ + Convert the package data into something usable + by output_package_listing_columns. 
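+
+ Illustrative sketch of the return value (hypothetical package names,
+ non-verbose, without --outdated):
+
+     data   == [["requests", "2.26.0"], ["urllib3", "1.26.7"]]
+     header == ["Package", "Version"]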
+ """ + header = ["Package", "Version"] + + running_outdated = options.outdated + if running_outdated: + header.extend(["Latest", "Type"]) + + has_editables = any(x.editable for x in pkgs) + if has_editables: + header.append("Editable project location") + + if options.verbose >= 1: + header.append("Location") + if options.verbose >= 1: + header.append("Installer") + + data = [] + for proj in pkgs: + # if we're working on the 'outdated' list, separate out the + # latest_version and type + row = [proj.raw_name, str(proj.version)] + + if running_outdated: + row.append(str(proj.latest_version)) + row.append(proj.latest_filetype) + + if has_editables: + row.append(proj.editable_project_location or "") + + if options.verbose >= 1: + row.append(proj.location or "") + if options.verbose >= 1: + row.append(proj.installer) + + data.append(row) + + return data, header + + +def format_for_json(packages: "_ProcessedDists", options: Values) -> str: + data = [] + for dist in packages: + info = { + "name": dist.raw_name, + "version": str(dist.version), + } + if options.verbose >= 1: + info["location"] = dist.location or "" + info["installer"] = dist.installer + if options.outdated: + info["latest_version"] = str(dist.latest_version) + info["latest_filetype"] = dist.latest_filetype + editable_project_location = dist.editable_project_location + if editable_project_location: + info["editable_project_location"] = editable_project_location + data.append(info) + return json.dumps(data) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/search.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/search.py new file mode 100644 index 0000000..03ed925 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/search.py @@ -0,0 +1,174 @@ +import logging +import shutil +import sys +import textwrap +import xmlrpc.client +from collections import OrderedDict +from optparse import Values +from typing import TYPE_CHECKING, Dict, List, Optional + +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin +from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.metadata import get_default_environment +from pip._internal.models.index import PyPI +from pip._internal.network.xmlrpc import PipXmlrpcTransport +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import write_output + +if TYPE_CHECKING: + from typing import TypedDict + + class TransformedHit(TypedDict): + name: str + summary: str + versions: List[str] + + +logger = logging.getLogger(__name__) + + +class SearchCommand(Command, SessionCommandMixin): + """Search for PyPI packages whose name or summary contains .""" + + usage = """ + %prog [options] """ + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-i", + "--index", + dest="index", + metavar="URL", + default=PyPI.pypi_url, + help="Base URL of Python Package Index (default %default)", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + if not args: + raise CommandError("Missing required argument (search query).") + query = args + pypi_hits = self.search(query, options) + hits = transform_hits(pypi_hits) + + terminal_width = None + if sys.stdout.isatty(): + terminal_width = shutil.get_terminal_size()[0] + + print_results(hits, 
terminal_width=terminal_width) + if pypi_hits: + return SUCCESS + return NO_MATCHES_FOUND + + def search(self, query: List[str], options: Values) -> List[Dict[str, str]]: + index_url = options.index + + session = self.get_default_session(options) + + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc.client.ServerProxy(index_url, transport) + try: + hits = pypi.search({"name": query, "summary": query}, "or") + except xmlrpc.client.Fault as fault: + message = "XMLRPC request failed [code: {code}]\n{string}".format( + code=fault.faultCode, + string=fault.faultString, + ) + raise CommandError(message) + assert isinstance(hits, list) + return hits + + +def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]: + """ + The list from pypi is really a list of versions. We want a list of + packages with the list of versions stored inline. This converts the + list from pypi into one we can use. + """ + packages: Dict[str, "TransformedHit"] = OrderedDict() + for hit in hits: + name = hit["name"] + summary = hit["summary"] + version = hit["version"] + + if name not in packages.keys(): + packages[name] = { + "name": name, + "summary": summary, + "versions": [version], + } + else: + packages[name]["versions"].append(version) + + # if this is the highest version, replace summary and score + if version == highest_version(packages[name]["versions"]): + packages[name]["summary"] = summary + + return list(packages.values()) + + +def print_dist_installation_info(name: str, latest: str) -> None: + env = get_default_environment() + dist = env.get_distribution(name) + if dist is not None: + with indent_log(): + if dist.version == latest: + write_output("INSTALLED: %s (latest)", dist.version) + else: + write_output("INSTALLED: %s", dist.version) + if parse_version(latest).pre: + write_output( + "LATEST: %s (pre-release; install" + " with `pip install --pre`)", + latest, + ) + else: + write_output("LATEST: %s", latest) + + +def print_results( + hits: List["TransformedHit"], + name_column_width: Optional[int] = None, + terminal_width: Optional[int] = None, +) -> None: + if not hits: + return + if name_column_width is None: + name_column_width = ( + max( + [ + len(hit["name"]) + len(highest_version(hit.get("versions", ["-"]))) + for hit in hits + ] + ) + + 4 + ) + + for hit in hits: + name = hit["name"] + summary = hit["summary"] or "" + latest = highest_version(hit.get("versions", ["-"])) + if terminal_width is not None: + target_width = terminal_width - name_column_width - 5 + if target_width > 10: + # wrap and indent summary to fit terminal + summary_lines = textwrap.wrap(summary, target_width) + summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines) + + name_latest = f"{name} ({latest})" + line = f"{name_latest:{name_column_width}} - {summary}" + try: + write_output(line) + print_dist_installation_info(name, latest) + except UnicodeEncodeError: + pass + + +def highest_version(versions: List[str]) -> str: + return max(versions, key=parse_version) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/show.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/show.py new file mode 100644 index 0000000..872292a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/show.py @@ -0,0 +1,235 @@ +import csv +import logging +import pathlib +from optparse import Values +from typing import Iterator, List, NamedTuple, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.base_command 
import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.metadata import BaseDistribution, get_default_environment +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class ShowCommand(Command): + """ + Show information about one or more installed packages. + + The output is in RFC-compliant mail header format. + """ + + usage = """ + %prog [options] ...""" + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-f", + "--files", + dest="files", + action="store_true", + default=False, + help="Show the full list of installed files for each package.", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + if not args: + logger.warning("ERROR: Please provide a package name or names.") + return ERROR + query = args + + results = search_packages_info(query) + if not print_results( + results, list_files=options.files, verbose=options.verbose + ): + return ERROR + return SUCCESS + + +class _PackageInfo(NamedTuple): + name: str + version: str + location: str + requires: List[str] + required_by: List[str] + installer: str + metadata_version: str + classifiers: List[str] + summary: str + homepage: str + author: str + author_email: str + license: str + entry_points: List[str] + files: Optional[List[str]] + + +def _convert_legacy_entry(entry: Tuple[str, ...], info: Tuple[str, ...]) -> str: + """Convert a legacy installed-files.txt path into modern RECORD path. + + The legacy format stores paths relative to the info directory, while the + modern format stores paths relative to the package root, e.g. the + site-packages directory. + + :param entry: Path parts of the installed-files.txt entry. + :param info: Path parts of the egg-info directory relative to package root. + :returns: The converted entry. + + For best compatibility with symlinks, this does not use ``abspath()`` or + ``Path.resolve()``, but tries to work with path parts: + + 1. While ``entry`` starts with ``..``, remove the equal amounts of parts + from ``info``; if ``info`` is empty, start appending ``..`` instead. + 2. Join the two directly. + """ + while entry and entry[0] == "..": + if not info or info[-1] == "..": + info += ("..",) + else: + info = info[:-1] + entry = entry[1:] + return str(pathlib.Path(*info, *entry)) + + +def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]: + """ + Gather details from installed distributions. Print distribution name, + version, location, and installed files. Installed files requires a + pip generated 'installed-files.txt' in the distributions '.egg-info' + directory. + """ + env = get_default_environment() + + installed = {dist.canonical_name: dist for dist in env.iter_distributions()} + query_names = [canonicalize_name(name) for name in query] + missing = sorted( + [name for name, pkg in zip(query, query_names) if pkg not in installed] + ) + if missing: + logger.warning("Package(s) not found: %s", ", ".join(missing)) + + def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: + return ( + dist.metadata["Name"] or "UNKNOWN" + for dist in installed.values() + if current_dist.canonical_name + in {canonicalize_name(d.name) for d in dist.iter_dependencies()} + ) + + def _files_from_record(dist: BaseDistribution) -> Optional[Iterator[str]]: + try: + text = dist.read_text("RECORD") + except FileNotFoundError: + return None + # This extra Path-str cast normalizes entries. 
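+ # e.g. str(pathlib.Path("./pkg/__init__.py")) -> "pkg/__init__.py", with
+ # native separators on Windows (purely illustrative).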
+ return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines())) + + def _files_from_legacy(dist: BaseDistribution) -> Optional[Iterator[str]]: + try: + text = dist.read_text("installed-files.txt") + except FileNotFoundError: + return None + paths = (p for p in text.splitlines(keepends=False) if p) + root = dist.location + info = dist.info_directory + if root is None or info is None: + return paths + try: + info_rel = pathlib.Path(info).relative_to(root) + except ValueError: # info is not relative to root. + return paths + if not info_rel.parts: # info *is* root. + return paths + return ( + _convert_legacy_entry(pathlib.Path(p).parts, info_rel.parts) for p in paths + ) + + for query_name in query_names: + try: + dist = installed[query_name] + except KeyError: + continue + + requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower) + required_by = sorted(_get_requiring_packages(dist), key=str.lower) + + try: + entry_points_text = dist.read_text("entry_points.txt") + entry_points = entry_points_text.splitlines(keepends=False) + except FileNotFoundError: + entry_points = [] + + files_iter = _files_from_record(dist) or _files_from_legacy(dist) + if files_iter is None: + files: Optional[List[str]] = None + else: + files = sorted(files_iter) + + metadata = dist.metadata + + yield _PackageInfo( + name=dist.raw_name, + version=str(dist.version), + location=dist.location or "", + requires=requires, + required_by=required_by, + installer=dist.installer, + metadata_version=dist.metadata_version or "", + classifiers=metadata.get_all("Classifier", []), + summary=metadata.get("Summary", ""), + homepage=metadata.get("Home-page", ""), + author=metadata.get("Author", ""), + author_email=metadata.get("Author-email", ""), + license=metadata.get("License", ""), + entry_points=entry_points, + files=files, + ) + + +def print_results( + distributions: Iterator[_PackageInfo], + list_files: bool, + verbose: bool, +) -> bool: + """ + Print the information from installed distributions found. 
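+
+ Illustrative output sketch (hypothetical package; field order follows the
+ write_output calls below):
+
+     Name: example-pkg
+     Version: 1.0
+     Summary: An example package
+     ...
+     Required-by: other-pkg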
+ """ + results_printed = False + for i, dist in enumerate(distributions): + results_printed = True + if i > 0: + write_output("---") + + write_output("Name: %s", dist.name) + write_output("Version: %s", dist.version) + write_output("Summary: %s", dist.summary) + write_output("Home-page: %s", dist.homepage) + write_output("Author: %s", dist.author) + write_output("Author-email: %s", dist.author_email) + write_output("License: %s", dist.license) + write_output("Location: %s", dist.location) + write_output("Requires: %s", ", ".join(dist.requires)) + write_output("Required-by: %s", ", ".join(dist.required_by)) + + if verbose: + write_output("Metadata-Version: %s", dist.metadata_version) + write_output("Installer: %s", dist.installer) + write_output("Classifiers:") + for classifier in dist.classifiers: + write_output(" %s", classifier) + write_output("Entry-points:") + for entry in dist.entry_points: + write_output(" %s", entry.strip()) + if list_files: + write_output("Files:") + if dist.files is None: + write_output("Cannot locate RECORD or installed-files.txt") + else: + for line in dist.files: + write_output(" %s", line.strip()) + return results_printed diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/uninstall.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/uninstall.py new file mode 100644 index 0000000..bb9e8e6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/uninstall.py @@ -0,0 +1,105 @@ +import logging +from optparse import Values +from typing import List + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import InstallationError +from pip._internal.req import parse_requirements +from pip._internal.req.constructors import ( + install_req_from_line, + install_req_from_parsed_requirement, +) +from pip._internal.utils.misc import protect_pip_from_modification_on_windows + +logger = logging.getLogger(__name__) + + +class UninstallCommand(Command, SessionCommandMixin): + """ + Uninstall packages. + + pip is able to uninstall most installed packages. Known exceptions are: + + - Pure distutils packages installed with ``python setup.py install``, which + leave behind no metadata to determine what files were installed. + - Script wrappers installed by ``python setup.py develop``. + """ + + usage = """ + %prog [options] ... + %prog [options] -r ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help=( + "Uninstall all the packages listed in the given requirements " + "file. This option can be used multiple times." 
+ ), + ) + self.cmd_opts.add_option( + "-y", + "--yes", + dest="yes", + action="store_true", + help="Don't ask for confirmation of uninstall deletions.", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + session = self.get_default_session(options) + + reqs_to_uninstall = {} + for name in args: + req = install_req_from_line( + name, + isolated=options.isolated_mode, + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + else: + logger.warning( + "Invalid requirement: %r ignored -" + " the uninstall command expects named" + " requirements.", + name, + ) + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, options=options, session=session + ): + req = install_req_from_parsed_requirement( + parsed_req, isolated=options.isolated_mode + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + if not reqs_to_uninstall: + raise InstallationError( + f"You must give at least one requirement to {self.name} (see " + f'"pip help {self.name}")' + ) + + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) + + for req in reqs_to_uninstall.values(): + uninstall_pathset = req.uninstall( + auto_confirm=options.yes, + verbose=self.verbosity > 0, + ) + if uninstall_pathset: + uninstall_pathset.commit() + + warn_if_run_as_root() + return SUCCESS diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/commands/wheel.py b/.venv/lib/python3.9/site-packages/pip/_internal/commands/wheel.py new file mode 100644 index 0000000..cea81ee --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/commands/wheel.py @@ -0,0 +1,177 @@ +import logging +import os +import shutil +from optparse import Values +from typing import List + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.wheel_builder import build, should_build_for_wheel_command + +logger = logging.getLogger(__name__) + + +class WheelCommand(RequirementCommand): + """ + Build Wheel archives for your requirements and dependencies. + + Wheel is a built-package format, and offers the advantage of not + recompiling your software during every install. For more details, see the + wheel docs: https://wheel.readthedocs.io/en/latest/ + + Requirements: setuptools>=0.8, and wheel. + + 'pip wheel' uses the bdist_wheel setuptools extension from the wheel + package to build individual wheels. + + """ + + usage = """ + %prog [options] ... + %prog [options] -r ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + def add_options(self) -> None: + + self.cmd_opts.add_option( + "-w", + "--wheel-dir", + dest="wheel_dir", + metavar="dir", + default=os.curdir, + help=( + "Build wheels into , where the default is the " + "current working directory." 
+ ), + ) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + self.cmd_opts.add_option( + "--no-verify", + dest="no_verify", + action="store_true", + default=False, + help="Don't verify if built wheel is valid.", + ) + + self.cmd_opts.add_option(cmdoptions.build_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) + + self.cmd_opts.add_option( + "--pre", + action="store_true", + default=False, + help=( + "Include pre-release and development versions. By default, " + "pip only finds stable versions." + ), + ) + + self.cmd_opts.add_option(cmdoptions.require_hashes()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: List[str]) -> int: + cmdoptions.check_install_build_global(options) + + session = self.get_default_session(options) + + finder = self._build_package_finder(options, session) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + options.wheel_dir = normalize_path(options.wheel_dir) + ensure_dir(options.wheel_dir) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="wheel", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.wheel_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + + reqs_to_build: List[InstallRequirement] = [] + for req in requirement_set.requirements.values(): + if req.is_wheel: + preparer.save_linked_requirement(req) + elif should_build_for_wheel_command(req): + reqs_to_build.append(req) + + # build wheels + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=(not options.no_verify), + build_options=options.build_options or [], + global_options=options.global_options or [], + ) + for req in build_successes: + assert req.link and req.link.is_wheel + assert req.local_file_path + # copy from cache to target directory + try: + shutil.copy(req.local_file_path, options.wheel_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, + e, + ) + build_failures.append(req) + if len(build_failures) != 0: + raise CommandError("Failed to build one or more wheels") + + return SUCCESS diff --git 
a/.venv/lib/python3.9/site-packages/pip/_internal/configuration.py b/.venv/lib/python3.9/site-packages/pip/_internal/configuration.py new file mode 100644 index 0000000..4c3a362 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/configuration.py @@ -0,0 +1,367 @@ +"""Configuration management setup + +Some terminology: +- name + As written in config files. +- value + Value associated with a name +- key + Name combined with it's section (section.name) +- variant + A single word describing where the configuration key-value pair came from +""" + +import configparser +import locale +import os +import sys +from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple + +from pip._internal.exceptions import ( + ConfigurationError, + ConfigurationFileCouldNotBeLoaded, +) +from pip._internal.utils import appdirs +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ensure_dir, enum + +RawConfigParser = configparser.RawConfigParser # Shorthand +Kind = NewType("Kind", str) + +CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf" +ENV_NAMES_IGNORED = "version", "help" + +# The kinds of configurations there are. +kinds = enum( + USER="user", # User Specific + GLOBAL="global", # System Wide + SITE="site", # [Virtual] Environment Specific + ENV="env", # from PIP_CONFIG_FILE + ENV_VAR="env-var", # from Environment Variables +) +OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR +VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE + +logger = getLogger(__name__) + + +# NOTE: Maybe use the optionx attribute to normalize keynames. +def _normalize_name(name: str) -> str: + """Make a name consistent regardless of source (environment or file)""" + name = name.lower().replace("_", "-") + if name.startswith("--"): + name = name[2:] # only prefer long opts + return name + + +def _disassemble_key(name: str) -> List[str]: + if "." not in name: + error_message = ( + "Key does not contain dot separated section and key. " + "Perhaps you wanted to use 'global.{}' instead?" + ).format(name) + raise ConfigurationError(error_message) + return name.split(".", 1) + + +def get_configuration_files() -> Dict[Kind, List[str]]: + global_config_files = [ + os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip") + ] + + site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) + legacy_config_file = os.path.join( + os.path.expanduser("~"), + "pip" if WINDOWS else ".pip", + CONFIG_BASENAME, + ) + new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME) + return { + kinds.GLOBAL: global_config_files, + kinds.SITE: [site_config_file], + kinds.USER: [legacy_config_file, new_config_file], + } + + +class Configuration: + """Handles management of configuration. + + Provides an interface to accessing and managing configuration files. + + This class converts provides an API that takes "section.key-name" style + keys and stores the value associated with it as "key-name" under the + section "section". + + This allows for a clean interface wherein the both the section and the + key-name are preserved in an easy to manage form in the configuration files + and the data stored is also nice. 
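+
+ Illustrative sketch (hypothetical file contents): a configuration file with
+
+     [global]
+     timeout = 60
+
+ is surfaced through this class as the key "global.timeout" with value "60".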
+ """ + + def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None: + super().__init__() + + if load_only is not None and load_only not in VALID_LOAD_ONLY: + raise ConfigurationError( + "Got invalid value for load_only - should be one of {}".format( + ", ".join(map(repr, VALID_LOAD_ONLY)) + ) + ) + self.isolated = isolated + self.load_only = load_only + + # Because we keep track of where we got the data from + self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = { + variant: [] for variant in OVERRIDE_ORDER + } + self._config: Dict[Kind, Dict[str, Any]] = { + variant: {} for variant in OVERRIDE_ORDER + } + self._modified_parsers: List[Tuple[str, RawConfigParser]] = [] + + def load(self) -> None: + """Loads configuration from configuration files and environment""" + self._load_config_files() + if not self.isolated: + self._load_environment_vars() + + def get_file_to_edit(self) -> Optional[str]: + """Returns the file with highest priority in configuration""" + assert self.load_only is not None, "Need to be specified a file to be editing" + + try: + return self._get_parser_to_modify()[0] + except IndexError: + return None + + def items(self) -> Iterable[Tuple[str, Any]]: + """Returns key-value pairs like dict.items() representing the loaded + configuration + """ + return self._dictionary.items() + + def get_value(self, key: str) -> Any: + """Get a value from the configuration.""" + try: + return self._dictionary[key] + except KeyError: + raise ConfigurationError(f"No such key - {key}") + + def set_value(self, key: str, value: Any) -> None: + """Modify a value in the configuration.""" + self._ensure_have_load_only() + + assert self.load_only + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + + # Modify the parser and the configuration + if not parser.has_section(section): + parser.add_section(section) + parser.set(section, name, value) + + self._config[self.load_only][key] = value + self._mark_as_modified(fname, parser) + + def unset_value(self, key: str) -> None: + """Unset a value in the configuration.""" + self._ensure_have_load_only() + + assert self.load_only + if key not in self._config[self.load_only]: + raise ConfigurationError(f"No such key - {key}") + + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + if not ( + parser.has_section(section) and parser.remove_option(section, name) + ): + # The option was not removed. + raise ConfigurationError( + "Fatal Internal error [id=1]. Please report as a bug." + ) + + # The section may be empty after the option was removed. + if not parser.items(section): + parser.remove_section(section) + self._mark_as_modified(fname, parser) + + del self._config[self.load_only][key] + + def save(self) -> None: + """Save the current in-memory state.""" + self._ensure_have_load_only() + + for fname, parser in self._modified_parsers: + logger.info("Writing to %s", fname) + + # Ensure directory exists. + ensure_dir(os.path.dirname(fname)) + + with open(fname, "w") as f: + parser.write(f) + + # + # Private routines + # + + def _ensure_have_load_only(self) -> None: + if self.load_only is None: + raise ConfigurationError("Needed a specific file to be modifying.") + logger.debug("Will be working with %s variant only", self.load_only) + + @property + def _dictionary(self) -> Dict[str, Any]: + """A dictionary representing the loaded configuration.""" + # NOTE: Dictionaries are not populated if not loaded. 
So, conditionals + # are not needed here. + retval = {} + + for variant in OVERRIDE_ORDER: + retval.update(self._config[variant]) + + return retval + + def _load_config_files(self) -> None: + """Loads configuration from configuration files""" + config_files = dict(self.iter_config_files()) + if config_files[kinds.ENV][0:1] == [os.devnull]: + logger.debug( + "Skipping loading configuration files due to " + "environment's PIP_CONFIG_FILE being os.devnull" + ) + return + + for variant, files in config_files.items(): + for fname in files: + # If there's specific variant set in `load_only`, load only + # that variant, not the others. + if self.load_only is not None and variant != self.load_only: + logger.debug("Skipping file '%s' (variant: %s)", fname, variant) + continue + + parser = self._load_file(variant, fname) + + # Keeping track of the parsers used + self._parsers[variant].append((fname, parser)) + + def _load_file(self, variant: Kind, fname: str) -> RawConfigParser: + logger.verbose("For variant '%s', will try loading '%s'", variant, fname) + parser = self._construct_parser(fname) + + for section in parser.sections(): + items = parser.items(section) + self._config[variant].update(self._normalized_keys(section, items)) + + return parser + + def _construct_parser(self, fname: str) -> RawConfigParser: + parser = configparser.RawConfigParser() + # If there is no such file, don't bother reading it but create the + # parser anyway, to hold the data. + # Doing this is useful when modifying and saving files, where we don't + # need to construct a parser. + if os.path.exists(fname): + try: + parser.read(fname) + except UnicodeDecodeError: + # See https://github.com/pypa/pip/issues/4963 + raise ConfigurationFileCouldNotBeLoaded( + reason="contains invalid {} characters".format( + locale.getpreferredencoding(False) + ), + fname=fname, + ) + except configparser.Error as error: + # See https://github.com/pypa/pip/issues/4893 + raise ConfigurationFileCouldNotBeLoaded(error=error) + return parser + + def _load_environment_vars(self) -> None: + """Loads configuration from environment variables""" + self._config[kinds.ENV_VAR].update( + self._normalized_keys(":env:", self.get_environ_vars()) + ) + + def _normalized_keys( + self, section: str, items: Iterable[Tuple[str, Any]] + ) -> Dict[str, Any]: + """Normalizes items to construct a dictionary with normalized keys. + + This routine is where the names become keys and are made the same + regardless of source - configuration files or environment. + """ + normalized = {} + for name, val in items: + key = section + "." + _normalize_name(name) + normalized[key] = val + return normalized + + def get_environ_vars(self) -> Iterable[Tuple[str, str]]: + """Returns a generator with all environmental vars with prefix PIP_""" + for key, val in os.environ.items(): + if key.startswith("PIP_"): + name = key[4:].lower() + if name not in ENV_NAMES_IGNORED: + yield name, val + + # XXX: This is patched in the tests. + def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: + """Yields variant and configuration files associated with it. + + This should be treated like items of a dictionary. 
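+
+ Illustrative sketch of the yield order (paths vary by platform; USER is
+ skipped when isolated or when PIP_CONFIG_FILE points at an existing file):
+
+     (kinds.ENV,    [...])   # PIP_CONFIG_FILE, if set
+     (kinds.GLOBAL, [...])
+     (kinds.USER,   [...])   # legacy file first, then the new location
+     (kinds.SITE,   [...])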
+ """ + # SMELL: Move the conditions out of this function + + # environment variables have the lowest priority + config_file = os.environ.get("PIP_CONFIG_FILE", None) + if config_file is not None: + yield kinds.ENV, [config_file] + else: + yield kinds.ENV, [] + + config_files = get_configuration_files() + + # at the base we have any global configuration + yield kinds.GLOBAL, config_files[kinds.GLOBAL] + + # per-user configuration next + should_load_user_config = not self.isolated and not ( + config_file and os.path.exists(config_file) + ) + if should_load_user_config: + # The legacy config file is overridden by the new config file + yield kinds.USER, config_files[kinds.USER] + + # finally virtualenv configuration first trumping others + yield kinds.SITE, config_files[kinds.SITE] + + def get_values_in_config(self, variant: Kind) -> Dict[str, Any]: + """Get values present in a config file""" + return self._config[variant] + + def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]: + # Determine which parser to modify + assert self.load_only + parsers = self._parsers[self.load_only] + if not parsers: + # This should not happen if everything works correctly. + raise ConfigurationError( + "Fatal Internal error [id=2]. Please report as a bug." + ) + + # Use the highest priority parser. + return parsers[-1] + + # XXX: This is patched in the tests. + def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None: + file_parser_tuple = (fname, parser) + if file_parser_tuple not in self._modified_parsers: + self._modified_parsers.append(file_parser_tuple) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._dictionary!r})" diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__init__.py new file mode 100644 index 0000000..9a89a83 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__init__.py @@ -0,0 +1,21 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.distributions.sdist import SourceDistribution +from pip._internal.distributions.wheel import WheelDistribution +from pip._internal.req.req_install import InstallRequirement + + +def make_distribution_for_install_requirement( + install_req: InstallRequirement, +) -> AbstractDistribution: + """Returns a Distribution for the given InstallRequirement""" + # Editable requirements will always be source distributions. They use the + # legacy logic until we create a modern standard for them. 
+ if install_req.editable: + return SourceDistribution(install_req) + + # If it's a wheel, it's a WheelDistribution + if install_req.is_wheel: + return WheelDistribution(install_req) + + # Otherwise, a SourceDistribution + return SourceDistribution(install_req) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..2258cbd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..a8c3349 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc new file mode 100644 index 0000000..5ccf6d7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc new file mode 100644 index 0000000..37d6881 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc new file mode 100644 index 0000000..94aaff6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/base.py b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/base.py new file mode 100644 index 0000000..149fff5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/base.py @@ -0,0 +1,36 @@ +import abc + +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata.base import BaseDistribution +from pip._internal.req import InstallRequirement + + +class AbstractDistribution(metaclass=abc.ABCMeta): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: + + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). + + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + + - we must be able to create a Distribution object exposing the + above metadata. 
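+
+ A minimal subclass sketch (hypothetical, for illustration only):
+
+     class PrebuiltDistribution(AbstractDistribution):
+         def get_metadata_distribution(self) -> BaseDistribution:
+             ...  # return a metadata object for self.req
+
+         def prepare_distribution_metadata(self, finder, build_isolation) -> None:
+             pass  # nothing to prepare for an already-built artifact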
+ """ + + def __init__(self, req: InstallRequirement) -> None: + super().__init__() + self.req = req + + @abc.abstractmethod + def get_metadata_distribution(self) -> BaseDistribution: + raise NotImplementedError() + + @abc.abstractmethod + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: + raise NotImplementedError() diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/installed.py b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/installed.py new file mode 100644 index 0000000..6c8c179 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/installed.py @@ -0,0 +1,22 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution + + +class InstalledDistribution(AbstractDistribution): + """Represents an installed package. + + This does not need any preparation as the required information has already + been computed. + """ + + def get_metadata_distribution(self) -> BaseDistribution: + from pip._internal.metadata.pkg_resources import Distribution as _Dist + + assert self.req.satisfied_by is not None, "not actually installed" + return _Dist(self.req.satisfied_by) + + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: + pass diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py new file mode 100644 index 0000000..cd85ac5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py @@ -0,0 +1,129 @@ +import logging +from typing import Iterable, Set, Tuple + +from pip._internal.build_env import BuildEnvironment +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.exceptions import InstallationError +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +class SourceDistribution(AbstractDistribution): + """Represents a source distribution. + + The preparation step for these needs metadata for the packages to be + generated, either using PEP 517 or using the legacy `setup.py egg_info`. + """ + + def get_metadata_distribution(self) -> BaseDistribution: + from pip._internal.metadata.pkg_resources import Distribution as _Dist + + return _Dist(self.req.get_dist()) + + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: + # Load pyproject.toml, to determine whether PEP 517 is to be used + self.req.load_pyproject_toml() + + # Set up the build isolation, if this requirement should be isolated + should_isolate = self.req.use_pep517 and build_isolation + if should_isolate: + # Setup an isolated environment and install the build backend static + # requirements in it. + self._prepare_build_backend(finder) + # Check that if the requirement is editable, it either supports PEP 660 or + # has a setup.py or a setup.cfg. This cannot be done earlier because we need + # to setup the build backend to verify it supports build_editable, nor can + # it be done later, because we want to avoid installing build requirements + # needlessly. 
Doing it here also works around setuptools generating + # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory + # without setup.py nor setup.cfg. + self.req.isolated_editable_sanity_check() + # Install the dynamic build requirements. + self._install_build_reqs(finder) + + self.req.prepare_metadata() + + def _prepare_build_backend(self, finder: PackageFinder) -> None: + # Isolate in a BuildEnvironment and install the build-time + # requirements. + pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, pyproject_requires, "overlay", "Installing build dependencies" + ) + conflicting, missing = self.req.build_env.check_requirements( + self.req.requirements_to_check + ) + if conflicting: + self._raise_conflicts("PEP 517/518 supported requirements", conflicting) + if missing: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "The project does not specify a build backend, and " + "pip cannot fall back to setuptools without %s.", + " and ".join(map(repr, sorted(missing))), + ) + + def _get_build_requires_wheel(self) -> Iterable[str]: + with self.req.build_env: + runner = runner_with_spinner_message("Getting requirements to build wheel") + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + return backend.get_requires_for_build_wheel() + + def _get_build_requires_editable(self) -> Iterable[str]: + with self.req.build_env: + runner = runner_with_spinner_message( + "Getting requirements to build editable" + ) + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + return backend.get_requires_for_build_editable() + + def _install_build_reqs(self, finder: PackageFinder) -> None: + # Install any extra build dependencies that the backend requests. + # This must be done in a second pass, as the pyproject.toml + # dependencies must be installed before we can call the backend. + if ( + self.req.editable + and self.req.permit_editable_wheels + and self.req.supports_pyproject_editable() + ): + build_reqs = self._get_build_requires_editable() + else: + build_reqs = self._get_build_requires_wheel() + conflicting, missing = self.req.build_env.check_requirements(build_reqs) + if conflicting: + self._raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + finder, missing, "normal", "Installing backend dependencies" + ) + + def _raise_conflicts( + self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]] + ) -> None: + format_string = ( + "Some build dependencies for {requirement} " + "conflict with {conflicting_with}: {description}." 
+ ) + error_message = format_string.format( + requirement=self.req, + conflicting_with=conflicting_with, + description=", ".join( + f"{installed} is incompatible with {wanted}" + for installed, wanted in sorted(conflicting_reqs) + ), + ) + raise InstallationError(error_message) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/distributions/wheel.py b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/wheel.py new file mode 100644 index 0000000..340b0f3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/distributions/wheel.py @@ -0,0 +1,31 @@ +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import ( + BaseDistribution, + FilesystemWheel, + get_wheel_distribution, +) + + +class WheelDistribution(AbstractDistribution): + """Represents a wheel distribution. + + This does not need any preparation as wheels can be directly unpacked. + """ + + def get_metadata_distribution(self) -> BaseDistribution: + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + assert self.req.local_file_path, "Set as part of preparation during download" + assert self.req.name, "Wheels are never unnamed" + wheel = FilesystemWheel(self.req.local_file_path) + return get_wheel_distribution(wheel, canonicalize_name(self.req.name)) + + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: + pass diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/exceptions.py b/.venv/lib/python3.9/site-packages/pip/_internal/exceptions.py new file mode 100644 index 0000000..ef5bc75 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/exceptions.py @@ -0,0 +1,402 @@ +"""Exceptions used throughout package""" + +import configparser +from itertools import chain, groupby, repeat +from typing import TYPE_CHECKING, Dict, List, Optional, Union + +from pip._vendor.pkg_resources import Distribution +from pip._vendor.requests.models import Request, Response + +if TYPE_CHECKING: + from hashlib import _Hash + + from pip._internal.metadata import BaseDistribution + from pip._internal.req.req_install import InstallRequirement + + +class PipError(Exception): + """Base pip exception""" + + +class ConfigurationError(PipError): + """General exception in configuration""" + + +class InstallationError(PipError): + """General exception during installation""" + + +class UninstallationError(PipError): + """General exception during uninstallation""" + + +class NoneMetadataError(PipError): + """ + Raised when accessing "METADATA" or "PKG-INFO" metadata for a + pip._vendor.pkg_resources.Distribution object and + `dist.has_metadata('METADATA')` returns True but + `dist.get_metadata('METADATA')` returns None (and similarly for + "PKG-INFO"). + """ + + def __init__( + self, + dist: Union[Distribution, "BaseDistribution"], + metadata_name: str, + ) -> None: + """ + :param dist: A Distribution object. + :param metadata_name: The name of the metadata being accessed + (can be "METADATA" or "PKG-INFO"). + """ + self.dist = dist + self.metadata_name = metadata_name + + def __str__(self) -> str: + # Use `dist` in the error message because its stringification + # includes more information, like the version and location. 
+ return "None {} metadata found for distribution: {}".format( + self.metadata_name, + self.dist, + ) + + +class UserInstallationInvalid(InstallationError): + """A --user install is requested on an environment without user site.""" + + def __str__(self) -> str: + return "User base directory is not specified" + + +class InvalidSchemeCombination(InstallationError): + def __str__(self) -> str: + before = ", ".join(str(a) for a in self.args[:-1]) + return f"Cannot set {before} and {self.args[-1]} together" + + +class DistributionNotFound(InstallationError): + """Raised when a distribution cannot be found to satisfy a requirement""" + + +class RequirementsFileParseError(InstallationError): + """Raised when a general error occurs parsing a requirements file line.""" + + +class BestVersionAlreadyInstalled(PipError): + """Raised when the most up-to-date version of a package is already + installed.""" + + +class BadCommand(PipError): + """Raised when virtualenv or a command is not found""" + + +class CommandError(PipError): + """Raised when there is an error in command-line arguments""" + + +class PreviousBuildDirError(PipError): + """Raised when there's a previous conflicting build directory""" + + +class NetworkConnectionError(PipError): + """HTTP connection error""" + + def __init__( + self, error_msg: str, response: Response = None, request: Request = None + ) -> None: + """ + Initialize NetworkConnectionError with `request` and `response` + objects. + """ + self.response = response + self.request = request + self.error_msg = error_msg + if ( + self.response is not None + and not self.request + and hasattr(response, "request") + ): + self.request = self.response.request + super().__init__(error_msg, response, request) + + def __str__(self) -> str: + return str(self.error_msg) + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" + + +class MetadataInconsistent(InstallationError): + """Built metadata contains inconsistent information. + + This is raised when the metadata contains values (e.g. name and version) + that do not match the information previously obtained from sdist filename + or user-supplied ``#egg=`` value. + """ + + def __init__( + self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str + ) -> None: + self.ireq = ireq + self.field = field + self.f_val = f_val + self.m_val = m_val + + def __str__(self) -> str: + template = ( + "Requested {} has inconsistent {}: " + "filename has {!r}, but metadata has {!r}" + ) + return template.format(self.ireq, self.field, self.f_val, self.m_val) + + +class InstallationSubprocessError(InstallationError): + """A subprocess call failed during installation.""" + + def __init__(self, returncode: int, description: str) -> None: + self.returncode = returncode + self.description = description + + def __str__(self) -> str: + return ( + "Command errored out with exit status {}: {} " + "Check the logs for full command output." 
+ ).format(self.returncode, self.description) + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self) -> None: + self.errors: List["HashError"] = [] + + def append(self, error: "HashError") -> None: + self.errors.append(error) + + def __str__(self) -> str: + lines = [] + self.errors.sort(key=lambda e: e.order) + for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return "\n".join(lines) + return "" + + def __bool__(self) -> bool: + return bool(self.errors) + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. + :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. This is + pasted on after the exception is instantiated, because it's not + typically available earlier. + + """ + + req: Optional["InstallRequirement"] = None + head = "" + order: int = -1 + + def body(self) -> str: + """Return a summary of me for display under the heading. + + This default implementation simply prints a description of the + triggering requirement. + + :param req: The InstallRequirement that provoked this error, with + its link already populated by the resolver's _populate_link(). + + """ + return f" {self._requirement_name()}" + + def __str__(self) -> str: + return f"{self.head}\n{self.body()}" + + def _requirement_name(self) -> str: + """Return a description of the requirement that triggered me. + + This default implementation returns long description of the req, with + line numbers + + """ + return str(self.req) if self.req else "unknown package" + + +class VcsHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 0 + head = ( + "Can't verify hashes for these requirements because we don't " + "have a way to hash version control repositories:" + ) + + +class DirectoryUrlHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 1 + head = ( + "Can't verify hashes for these file:// requirements because they " + "point to directories:" + ) + + +class HashMissing(HashError): + """A hash was needed for a requirement but is absent.""" + + order = 2 + head = ( + "Hashes are required in --require-hashes mode, but they are " + "missing from some requirements. Here is a list of those " + "requirements along with the hashes their downloaded archives " + "actually had. Add lines like these to your requirements files to " + "prevent tampering. (If you did not enable --require-hashes " + "manually, note that it turns on automatically when any package " + "has a hash.)" + ) + + def __init__(self, gotten_hash: str) -> None: + """ + :param gotten_hash: The hash of the (possibly malicious) archive we + just downloaded + """ + self.gotten_hash = gotten_hash + + def body(self) -> str: + # Dodge circular import. 
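+        # (FAVORITE_HASH lives in pip._internal.utils.hashes, which itself
+        # imports the hash exception classes defined in this module, so an
+        # import at module scope would be circular; importing lazily inside
+        # the method avoids that.)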
+ from pip._internal.utils.hashes import FAVORITE_HASH + + package = None + if self.req: + # In the case of URL-based requirements, display the original URL + # seen in the requirements file rather than the package name, + # so the output can be directly copied into the requirements file. + package = ( + self.req.original_link + if self.req.original_link + # In case someone feeds something downright stupid + # to InstallRequirement's constructor. + else getattr(self.req, "req", None) + ) + return " {} --hash={}:{}".format( + package or "unknown package", FAVORITE_HASH, self.gotten_hash + ) + + +class HashUnpinned(HashError): + """A requirement had a hash specified but was not pinned to a specific + version.""" + + order = 3 + head = ( + "In --require-hashes mode, all requirements must have their " + "versions pinned with ==. These do not:" + ) + + +class HashMismatch(HashError): + """ + Distribution file hash values don't match. + + :ivar package_name: The name of the package that triggered the hash + mismatch. Feel free to write to this after the exception is raise to + improve its error message. + + """ + + order = 4 + head = ( + "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS " + "FILE. If you have updated the package versions, please update " + "the hashes. Otherwise, examine the package contents carefully; " + "someone may have tampered with them." + ) + + def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None: + """ + :param allowed: A dict of algorithm names pointing to lists of allowed + hex digests + :param gots: A dict of algorithm names pointing to hashes we + actually got from the files under suspicion + """ + self.allowed = allowed + self.gots = gots + + def body(self) -> str: + return " {}:\n{}".format(self._requirement_name(), self._hash_comparison()) + + def _hash_comparison(self) -> str: + """ + Return a comparison of actual and expected hash values. + + Example:: + + Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde + or 123451234512345123451234512345123451234512345 + Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef + + """ + + def hash_then_or(hash_name: str) -> "chain[str]": + # For now, all the decent hashes have 6-char names, so we can get + # away with hard-coding space literals. + return chain([hash_name], repeat(" or")) + + lines: List[str] = [] + for hash_name, expecteds in self.allowed.items(): + prefix = hash_then_or(hash_name) + lines.extend( + (" Expected {} {}".format(next(prefix), e)) for e in expecteds + ) + lines.append( + " Got {}\n".format(self.gots[hash_name].hexdigest()) + ) + return "\n".join(lines) + + +class UnsupportedPythonVersion(InstallationError): + """Unsupported python version according to Requires-Python package + metadata.""" + + +class ConfigurationFileCouldNotBeLoaded(ConfigurationError): + """When there are errors while loading a configuration file""" + + def __init__( + self, + reason: str = "could not be loaded", + fname: Optional[str] = None, + error: Optional[configparser.Error] = None, + ) -> None: + super().__init__(error) + self.reason = reason + self.fname = fname + self.error = error + + def __str__(self) -> str: + if self.fname is not None: + message_part = f" in {self.fname}." 
+ else: + assert self.error is not None + message_part = f".\n{self.error}\n" + return f"Configuration file {self.reason}{message_part}" diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/index/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/index/__init__.py new file mode 100644 index 0000000..7a17b7b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/index/__init__.py @@ -0,0 +1,2 @@ +"""Index interaction code +""" diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..afc296a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc new file mode 100644 index 0000000..3242066 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc new file mode 100644 index 0000000..38b524b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc new file mode 100644 index 0000000..f4c8ee9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/index/collector.py b/.venv/lib/python3.9/site-packages/pip/_internal/index/collector.py new file mode 100644 index 0000000..d941223 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/index/collector.py @@ -0,0 +1,536 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_sources(). +""" + +import cgi +import collections +import functools +import itertools +import logging +import os +import re +import urllib.parse +import urllib.request +import xml.etree.ElementTree +from optparse import Values +from typing import ( + Callable, + Iterable, + List, + MutableMapping, + NamedTuple, + Optional, + Sequence, + Union, +) + +from pip._vendor import html5lib, requests +from pip._vendor.requests import Response +from pip._vendor.requests.exceptions import RetryError, SSLError + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import pairwise, redact_auth_from_url +from pip._internal.vcs import vcs + +from .sources import CandidatesFromPage, LinkSource, build_source + +logger = logging.getLogger(__name__) + +HTMLElement = xml.etree.ElementTree.Element +ResponseHeaders = MutableMapping[str, str] + + +def _match_vcs_scheme(url: str) -> Optional[str]: + """Look for VCS schemes in the URL. 
+ + Returns the matched VCS scheme, or None if there's no match. + """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in "+:": + return scheme + return None + + +class _NotHTML(Exception): + def __init__(self, content_type: str, request_desc: str) -> None: + super().__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_html_header(response: Response) -> None: + """Check the Content-Type header to ensure the response contains HTML. + + Raises `_NotHTML` if the content type is not text/html. + """ + content_type = response.headers.get("Content-Type", "") + if not content_type.lower().startswith("text/html"): + raise _NotHTML(content_type, response.request.method) + + +class _NotHTTP(Exception): + pass + + +def _ensure_html_response(url: str, session: PipSession) -> None: + """Send a HEAD request to the URL, and ensure the response contains HTML. + + Raises `_NotHTTP` if the URL is not available for a HEAD request, or + `_NotHTML` if the content type is not text/html. + """ + scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url) + if scheme not in {"http", "https"}: + raise _NotHTTP() + + resp = session.head(url, allow_redirects=True) + raise_for_status(resp) + + _ensure_html_header(resp) + + +def _get_html_response(url: str, session: PipSession) -> Response: + """Access an HTML page with GET, and return the response. + + This consists of three parts: + + 1. If the URL looks suspiciously like an archive, send a HEAD first to + check the Content-Type is HTML, to avoid downloading a large file. + Raise `_NotHTTP` if the content type cannot be determined, or + `_NotHTML` if it is not HTML. + 2. Actually perform the request. Raise HTTP exceptions on network failures. + 3. Check the Content-Type header to make sure we got HTML, and raise + `_NotHTML` otherwise. + """ + if is_archive_file(Link(url).filename): + _ensure_html_response(url, session=session) + + logger.debug("Getting page %s", redact_auth_from_url(url)) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + # We don't want to blindly returned cached data for + # /simple/, because authors generally expecting that + # twine upload && pip install will function, but if + # they've done a pip install in the last ~10 minutes + # it won't. Thus by setting this to zero we will not + # blindly use any cached data, however the benefit of + # using max-age=0 instead of no-cache, is that we will + # still support conditional requests, so we will still + # minimize traffic sent in cases where the page hasn't + # changed at all, we will just always incur the round + # trip for the conditional GET now instead of only + # once per 10 minutes. + # For more information, please see pypa/pip#5670. + "Cache-Control": "max-age=0", + }, + ) + raise_for_status(resp) + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. 
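+    # For example, an index URL ending in "/simple/foo/" has no archive-like
+    # extension, so the HEAD shortcut above never runs and the Content-Type of
+    # this GET response is the only reliable signal that we received HTML.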
+ _ensure_html_header(resp) + + return resp + + +def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]: + """Determine if we have any encoding information in our headers.""" + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + if "charset" in params: + return params["charset"] + return None + + +def _determine_base_url(document: HTMLElement, page_url: str) -> str: + """Determine the HTML document's base URL. + + This looks for a ```` tag in the HTML document. If present, its href + attribute denotes the base URL of anchor tags in the document. If there is + no such tag (or if it does not have a valid href attribute), the HTML + file's URL is used as the base URL. + + :param document: An HTML document representation. The current + implementation expects the result of ``html5lib.parse()``. + :param page_url: The URL of the HTML document. + """ + for base in document.findall(".//base"): + href = base.get("href") + if href is not None: + return href + return page_url + + +def _clean_url_path_part(part: str) -> str: + """ + Clean a "part" of a URL path (i.e. after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + return urllib.parse.quote(urllib.parse.unquote(part)) + + +def _clean_file_url_path(part: str) -> str: + """ + Clean the first part of a URL path that corresponds to a local + filesystem path (i.e. the first part after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + # Also, on Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + return urllib.request.pathname2url(urllib.request.url2pathname(part)) + + +# percent-encoded: / +_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE) + + +def _clean_url_path(path: str, is_local_path: bool) -> str: + """ + Clean the path portion of a URL. + """ + if is_local_path: + clean_func = _clean_file_url_path + else: + clean_func = _clean_url_path_part + + # Split on the reserved characters prior to cleaning so that + # revision strings in VCS URLs are properly preserved. + parts = _reserved_chars_re.split(path) + + cleaned_parts = [] + for to_clean, reserved in pairwise(itertools.chain(parts, [""])): + cleaned_parts.append(clean_func(to_clean)) + # Normalize %xx escapes (e.g. %2f -> %2F) + cleaned_parts.append(reserved.upper()) + + return "".join(cleaned_parts) + + +def _clean_link(url: str) -> str: + """ + Make sure a link is fully quoted. + For example, if ' ' occurs in the URL, it will be replaced with "%20", + and without double-quoting other characters. + """ + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. + result = urllib.parse.urlparse(url) + # If the netloc is empty, then the URL refers to a local filesystem path. + is_local_path = not result.netloc + path = _clean_url_path(result.path, is_local_path=is_local_path) + return urllib.parse.urlunparse(result._replace(path=path)) + + +def _create_link_from_element( + anchor: HTMLElement, + page_url: str, + base_url: str, +) -> Optional[Link]: + """ + Convert an anchor element in a simple repository page to a Link. 
+ """ + href = anchor.get("href") + if not href: + return None + + url = _clean_link(urllib.parse.urljoin(base_url, href)) + pyrequire = anchor.get("data-requires-python") + yanked_reason = anchor.get("data-yanked") + + link = Link( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + ) + + return link + + +class CacheablePageContent: + def __init__(self, page: "HTMLPage") -> None: + assert page.cache_link_parsing + self.page = page + + def __eq__(self, other: object) -> bool: + return isinstance(other, type(self)) and self.page.url == other.page.url + + def __hash__(self) -> int: + return hash(self.page.url) + + +def with_cached_html_pages( + fn: Callable[["HTMLPage"], Iterable[Link]], +) -> Callable[["HTMLPage"], List[Link]]: + """ + Given a function that parses an Iterable[Link] from an HTMLPage, cache the + function's result (keyed by CacheablePageContent), unless the HTMLPage + `page` has `page.cache_link_parsing == False`. + """ + + @functools.lru_cache(maxsize=None) + def wrapper(cacheable_page: CacheablePageContent) -> List[Link]: + return list(fn(cacheable_page.page)) + + @functools.wraps(fn) + def wrapper_wrapper(page: "HTMLPage") -> List[Link]: + if page.cache_link_parsing: + return wrapper(CacheablePageContent(page)) + return list(fn(page)) + + return wrapper_wrapper + + +@with_cached_html_pages +def parse_links(page: "HTMLPage") -> Iterable[Link]: + """ + Parse an HTML document, and yield its anchor elements as Link objects. + """ + document = html5lib.parse( + page.content, + transport_encoding=page.encoding, + namespaceHTMLElements=False, + ) + + url = page.url + base_url = _determine_base_url(document, url) + for anchor in document.findall(".//a"): + link = _create_link_from_element( + anchor, + page_url=url, + base_url=base_url, + ) + if link is None: + continue + yield link + + +class HTMLPage: + """Represents one page, along with its URL""" + + def __init__( + self, + content: bytes, + encoding: Optional[str], + url: str, + cache_link_parsing: bool = True, + ) -> None: + """ + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + :param cache_link_parsing: whether links parsed from this page's url + should be cached. PyPI index urls should + have this set to False, for example. + """ + self.content = content + self.encoding = encoding + self.url = url + self.cache_link_parsing = cache_link_parsing + + def __str__(self) -> str: + return redact_auth_from_url(self.url) + + +def _handle_get_page_fail( + link: Link, + reason: Union[str, Exception], + meth: Optional[Callable[..., None]] = None, +) -> None: + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + +def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage: + encoding = _get_encoding_from_headers(response.headers) + return HTMLPage( + response.content, + encoding=encoding, + url=response.url, + cache_link_parsing=cache_link_parsing, + ) + + +def _get_html_page( + link: Link, session: Optional[PipSession] = None +) -> Optional["HTMLPage"]: + if session is None: + raise TypeError( + "_get_html_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url.split("#", 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. 
+ vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.warning( + "Cannot look at %s URL %s because it does not support lookup as web pages.", + vcs_scheme, + link, + ) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib.parse.urlparse(url) + if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith("/"): + url += "/" + url = urllib.parse.urljoin(url, "index.html") + logger.debug(" file: URL is directory, getting %s", url) + + try: + resp = _get_html_response(url, session=session) + except _NotHTTP: + logger.warning( + "Skipping page %s because it looks like an archive, and cannot " + "be checked by a HTTP HEAD request.", + link, + ) + except _NotHTML as exc: + logger.warning( + "Skipping page %s because the %s request got Content-Type: %s." + "The only supported Content-Type is text/html", + link, + exc.request_desc, + exc.content_type, + ) + except NetworkConnectionError as exc: + _handle_get_page_fail(link, exc) + except RetryError as exc: + _handle_get_page_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_page_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_page_fail(link, f"connection error: {exc}") + except requests.Timeout: + _handle_get_page_fail(link, "timed out") + else: + return _make_html_page(resp, cache_link_parsing=link.cache_link_parsing) + return None + + +class CollectedSources(NamedTuple): + find_links: Sequence[Optional[LinkSource]] + index_urls: Sequence[Optional[LinkSource]] + + +class LinkCollector: + + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_sources() method. + """ + + def __init__( + self, + session: PipSession, + search_scope: SearchScope, + ) -> None: + self.search_scope = search_scope + self.session = session + + @classmethod + def create( + cls, + session: PipSession, + options: Values, + suppress_no_index: bool = False, + ) -> "LinkCollector": + """ + :param session: The Session to use to make requests. + :param suppress_no_index: Whether to ignore the --no-index option + when constructing the SearchScope object. + """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index and not suppress_no_index: + logger.debug( + "Ignoring indexes: %s", + ",".join(redact_auth_from_url(url) for url in index_urls), + ) + index_urls = [] + + # Make sure find_links is a list before passing to create(). + find_links = options.find_links or [] + + search_scope = SearchScope.create( + find_links=find_links, + index_urls=index_urls, + ) + link_collector = LinkCollector( + session=session, + search_scope=search_scope, + ) + return link_collector + + @property + def find_links(self) -> List[str]: + return self.search_scope.find_links + + def fetch_page(self, location: Link) -> Optional[HTMLPage]: + """ + Fetch an HTML page containing package links. + """ + return _get_html_page(location, session=self.session) + + def collect_sources( + self, + project_name: str, + candidates_from_page: CandidatesFromPage, + ) -> CollectedSources: + # The OrderedDict calls deduplicate sources by URL. 
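+        # build_source() yields (url, LinkSource) pairs; building an
+        # OrderedDict from them keeps one entry per URL (later duplicates
+        # overwrite earlier ones) while preserving the order in which the
+        # URLs were first seen.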
+ index_url_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=False, + cache_link_parsing=False, + ) + for loc in self.search_scope.get_index_urls_locations(project_name) + ).values() + find_links_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=True, + cache_link_parsing=True, + ) + for loc in self.find_links + ).values() + + if logger.isEnabledFor(logging.DEBUG): + lines = [ + f"* {s.link}" + for s in itertools.chain(find_links_sources, index_url_sources) + if s is not None and s.link is not None + ] + lines = [ + f"{len(lines)} location(s) to search " + f"for versions of {project_name}:" + ] + lines + logger.debug("\n".join(lines)) + + return CollectedSources( + find_links=list(find_links_sources), + index_urls=list(index_url_sources), + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/index/package_finder.py b/.venv/lib/python3.9/site-packages/pip/_internal/index/package_finder.py new file mode 100644 index 0000000..a2702db --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/index/package_finder.py @@ -0,0 +1,993 @@ +"""Routines related to PyPI, indexes""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import functools +import itertools +import logging +import re +from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.tags import Tag +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import _BaseVersion +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.index.collector import LinkCollector, parse_links +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.models.wheel import Wheel +from pip._internal.req import InstallRequirement +from pip._internal.utils._log import getLogger +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import build_netloc +from pip._internal.utils.packaging import check_requires_python +from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS +from pip._internal.utils.urls import url_to_path + +__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"] + + +logger = getLogger(__name__) + +BuildTag = Union[Tuple[()], Tuple[int, str]] +CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag] + + +def _check_link_requires_python( + link: Link, + version_info: Tuple[int, int, int], + ignore_requires_python: bool = False, +) -> bool: + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. 
+ :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, + version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, + link, + ) + else: + if not is_compatible: + version = ".".join(map(str, version_info)) + if not ignore_requires_python: + logger.verbose( + "Link requires a different Python (%s not in: %r): %s", + version, + link.requires_python, + link, + ) + return False + + logger.debug( + "Ignoring failed Requires-Python check (%s not in: %r) for link: %s", + version, + link.requires_python, + link, + ) + + return True + + +class LinkEvaluator: + + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$") + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + project_name: str, + canonical_name: str, + formats: FrozenSet[str], + target_python: TargetPython, + allow_yanked: bool, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """ + :param project_name: The user supplied package name. + :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. + :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + + self.project_name = project_name + + def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]: + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (is_candidate, result), where `result` is (1) a + version string if `is_candidate` is True, and (2) if + `is_candidate` is False, an optional string to log the reason + the link fails to qualify. 
+ """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or "" + return (False, f"yanked for reason: {reason}") + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (False, "not a file") + if ext not in SUPPORTED_EXTENSIONS: + return (False, f"unsupported archive format: {ext}") + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = "No binaries permitted for {}".format(self.project_name) + return (False, reason) + if "macosx10" in link.path and ext == ".zip": + return (False, "macosx10 one") + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return (False, "invalid wheel filename") + if canonicalize_name(wheel.name) != self._canonical_name: + reason = "wrong project name (not {})".format(self.project_name) + return (False, reason) + + supported_tags = self._target_python.get_tags() + if not wheel.supported(supported_tags): + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = wheel.get_formatted_file_tags() + reason = ( + "none of the wheel's tags ({}) are compatible " + "(run pip debug --verbose to show compatible tags)".format( + ", ".join(file_tags) + ) + ) + return (False, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. + if "source" not in self._formats and ext != WHEEL_EXTENSION: + reason = f"No sources permitted for {self.project_name}" + return (False, reason) + + if not version: + version = _extract_version_from_fragment( + egg_info, + self._canonical_name, + ) + if not version: + reason = f"Missing project version for {self.project_name}" + return (False, reason) + + match = self._py_version_re.search(version) + if match: + version = version[: match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return (False, "Python version is incorrect") + + supports_python = _check_link_requires_python( + link, + version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python: + # Return None for the reason text to suppress calling + # _log_skipped_link(). + return (False, None) + + logger.debug("Found link %s, version: %s", link, version) + + return (True, version) + + +def filter_unallowed_hashes( + candidates: List[InstallationCandidate], + hashes: Hashes, + project_name: str, +) -> List[InstallationCandidate]: + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). + """ + if not hashes: + logger.debug( + "Given no hashes to check %s links for project %r: " + "discarding no candidates", + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. 
+ return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. + filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = "discarding no candidates" + else: + discard_message = "discarding {} non-matches:\n {}".format( + len(non_matches), + "\n ".join(str(candidate.link) for candidate in non_matches), + ) + + logger.debug( + "Checked %s links for project %r against %s hashes " + "(%s matches, %s no digest): %s", + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message, + ) + + return filtered + + +class CandidatePreferences: + + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + def __init__( + self, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + ) -> None: + """ + :param allow_all_prereleases: Whether to allow all pre-releases. + """ + self.allow_all_prereleases = allow_all_prereleases + self.prefer_binary = prefer_binary + + +class BestCandidateResult: + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. + """ + + def __init__( + self, + candidates: List[InstallationCandidate], + applicable_candidates: List[InstallationCandidate], + best_candidate: Optional[InstallationCandidate], + ) -> None: + """ + :param candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. + """ + assert set(applicable_candidates) <= set(candidates) + + if best_candidate is None: + assert not applicable_candidates + else: + assert best_candidate in applicable_candidates + + self._applicable_candidates = applicable_candidates + self._candidates = candidates + + self.best_candidate = best_candidate + + def iter_all(self) -> Iterable[InstallationCandidate]: + """Iterate through all candidates.""" + return iter(self._candidates) + + def iter_applicable(self) -> Iterable[InstallationCandidate]: + """Iterate through the applicable candidates.""" + return iter(self._applicable_candidates) + + +class CandidateEvaluator: + + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name: str, + target_python: Optional[TargetPython] = None, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> "CandidateEvaluator": + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. 
+ :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name: str, + supported_tags: List[Tag], + specifier: specifiers.BaseSpecifier, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + hashes: Optional[Hashes] = None, + ) -> None: + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). + """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + # Since the index of the tag in the _supported_tags list is used + # as a priority, precompute a map from tag to index/priority to be + # used in wheel.find_most_preferred_tag. + self._wheel_tag_preferences = { + tag: idx for idx, tag in enumerate(supported_tags) + } + + def get_applicable_candidates( + self, + candidates: List[InstallationCandidate], + ) -> List[InstallationCandidate]: + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + versions = { + str(v) + for v in specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + (str(c.version) for c in candidates), + prereleases=allow_prereleases, + ) + } + + # Again, converting version to str to deal with debundling. + applicable_candidates = [c for c in candidates if str(c.version) in versions] + + filtered_applicable_candidates = filter_unallowed_hashes( + candidates=applicable_candidates, + hashes=self._hashes, + project_name=self._project_name, + ) + + return sorted(filtered_applicable_candidates, key=self._sort_key) + + def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey: + """ + Function to pass as the `key` argument to a call to sorted() to sort + InstallationCandidates by preference. + + Returns a tuple such that tuples sorting as greater using Python's + default comparison operator are more preferred. + + The preference is as follows: + + First and foremost, candidates with allowed (matching) hashes are + always preferred over candidates without matching hashes. This is + because e.g. if the only candidate with an allowed hash is yanked, + we still want to use that candidate. + + Second, excepting hash considerations, candidates that have been + yanked (in the sense of PEP 592) are always less preferred than + candidates that haven't been yanked. Then: + + If not finding wheels, they are sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self._supported_tags) + 3. 
source archives + If prefer_binary was set, then all wheels are sorted above sources. + + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + valid_tags = self._supported_tags + support_num = len(valid_tags) + build_tag: BuildTag = () + binary_preference = 0 + link = candidate.link + if link.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(link.filename) + try: + pri = -( + wheel.find_most_preferred_tag( + valid_tags, self._wheel_tag_preferences + ) + ) + except ValueError: + raise UnsupportedWheel( + "{} is not a supported wheel for this platform. It " + "can't be sorted.".format(wheel.filename) + ) + if self._prefer_binary: + binary_preference = 1 + if wheel.build_tag is not None: + match = re.match(r"^(\d+)(.*)$", wheel.build_tag) + build_tag_groups = match.groups() + build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) + else: # sdist + pri = -(support_num) + has_allowed_hash = int(link.is_hash_allowed(self._hashes)) + yank_value = -1 * int(link.is_yanked) # -1 for yanked. + return ( + has_allowed_hash, + yank_value, + binary_preference, + candidate.version, + pri, + build_tag, + ) + + def sort_best_candidate( + self, + candidates: List[InstallationCandidate], + ) -> Optional[InstallationCandidate]: + """ + Return the best candidate per the instance's sort order, or None if + no candidate is acceptable. + """ + if not candidates: + return None + best_candidate = max(candidates, key=self._sort_key) + return best_candidate + + def compute_best_candidate( + self, + candidates: List[InstallationCandidate], + ) -> BestCandidateResult: + """ + Compute and return a `BestCandidateResult` instance. + """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder: + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector: LinkCollector, + target_python: TargetPython, + allow_yanked: bool, + format_control: Optional[FormatControl] = None, + candidate_prefs: Optional[CandidatePreferences] = None, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links: Set[Link] = set() + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. 
This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + @classmethod + def create( + cls, + link_collector: LinkCollector, + selection_prefs: SelectionPreferences, + target_python: Optional[TargetPython] = None, + ) -> "PackageFinder": + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + ) + + @property + def target_python(self) -> TargetPython: + return self._target_python + + @property + def search_scope(self) -> SearchScope: + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope: SearchScope) -> None: + self._link_collector.search_scope = search_scope + + @property + def find_links(self) -> List[str]: + return self._link_collector.find_links + + @property + def index_urls(self) -> List[str]: + return self.search_scope.index_urls + + @property + def trusted_hosts(self) -> Iterable[str]: + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def allow_all_prereleases(self) -> bool: + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self) -> None: + self._candidate_prefs.allow_all_prereleases = True + + @property + def prefer_binary(self) -> bool: + return self._candidate_prefs.prefer_binary + + def set_prefer_binary(self) -> None: + self._candidate_prefs.prefer_binary = True + + def make_link_evaluator(self, project_name: str) -> LinkEvaluator: + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ) + + def _sort_links(self, links: Iterable[Link]) -> List[Link]: + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen: Set[Link] = set() + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link: Link, reason: str) -> None: + if link not in self._logged_links: + # Put the link at the end so the reason is more visible and because + # the link string is usually very long. + logger.debug("Skipping link: %s: %s", reason, link) + self._logged_links.add(link) + + def get_install_candidate( + self, link_evaluator: LinkEvaluator, link: Link + ) -> Optional[InstallationCandidate]: + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. 
+ """ + is_candidate, result = link_evaluator.evaluate_link(link) + if not is_candidate: + if result: + self._log_skipped_link(link, reason=result) + return None + + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + version=result, + ) + + def evaluate_links( + self, link_evaluator: LinkEvaluator, links: Iterable[Link] + ) -> List[InstallationCandidate]: + """ + Convert links that are candidates to InstallationCandidate objects. + """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url( + self, project_url: Link, link_evaluator: LinkEvaluator + ) -> List[InstallationCandidate]: + logger.debug( + "Fetching project page and analyzing links: %s", + project_url, + ) + html_page = self._link_collector.fetch_page(project_url) + if html_page is None: + return [] + + page_links = list(parse_links(html_page)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + @functools.lru_cache(maxsize=None) + def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]: + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. + """ + link_evaluator = self.make_link_evaluator(project_name) + + collected_sources = self._link_collector.collect_sources( + project_name=project_name, + candidates_from_page=functools.partial( + self.process_project_url, + link_evaluator=link_evaluator, + ), + ) + + page_candidates_it = itertools.chain.from_iterable( + source.page_candidates() + for sources in collected_sources + for source in sources + if source is not None + ) + page_candidates = list(page_candidates_it) + + file_links_it = itertools.chain.from_iterable( + source.file_links() + for sources in collected_sources + for source in sources + if source is not None + ) + file_candidates = self.evaluate_links( + link_evaluator, + sorted(file_links_it, reverse=True), + ) + + if logger.isEnabledFor(logging.DEBUG) and file_candidates: + paths = [url_to_path(c.link.url) for c in file_candidates] + logger.debug("Local files found: %s", ", ".join(paths)) + + # This is an intentional priority ordering + return file_candidates + page_candidates + + def make_candidate_evaluator( + self, + project_name: str, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> CandidateEvaluator: + """Create a CandidateEvaluator object to use.""" + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + @functools.lru_cache(maxsize=None) + def find_best_candidate( + self, + project_name: str, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> BestCandidateResult: + """Find matches for the given project and specifier. + + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + + :return: A `BestCandidateResult` instance. 
+ """ + candidates = self.find_all_candidates(project_name) + candidate_evaluator = self.make_candidate_evaluator( + project_name=project_name, + specifier=specifier, + hashes=hashes, + ) + return candidate_evaluator.compute_best_candidate(candidates) + + def find_requirement( + self, req: InstallRequirement, upgrade: bool + ) -> Optional[InstallationCandidate]: + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a InstallationCandidate if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + hashes = req.hashes(trust_internet=False) + best_candidate_result = self.find_best_candidate( + req.name, + specifier=req.specifier, + hashes=hashes, + ) + best_candidate = best_candidate_result.best_candidate + + installed_version: Optional[_BaseVersion] = None + if req.satisfied_by is not None: + installed_version = parse_version(req.satisfied_by.version) + + def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str: + # This repeated parse_version and str() conversion is needed to + # handle different vendoring sources from pip and pkg_resources. + # If we stop using the pkg_resources provided specifier and start + # using our own, we can drop the cast to str(). + return ( + ", ".join( + sorted( + {str(c.version) for c in cand_iter}, + key=parse_version, + ) + ) + or "none" + ) + + if installed_version is None and best_candidate is None: + logger.critical( + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", + req, + _format_versions(best_candidate_result.iter_all()), + ) + + raise DistributionNotFound( + "No matching distribution found for {}".format(req) + ) + + best_installed = False + if installed_version and ( + best_candidate is None or best_candidate.version <= installed_version + ): + best_installed = True + + if not upgrade and installed_version is not None: + if best_installed: + logger.debug( + "Existing installed version (%s) is most up-to-date and " + "satisfies requirement", + installed_version, + ) + else: + logger.debug( + "Existing installed version (%s) satisfies requirement " + "(most up-to-date version is %s)", + installed_version, + best_candidate.version, + ) + return None + + if best_installed: + # We have an existing version, and its the best version + logger.debug( + "Installed version (%s) is most up-to-date (past versions: %s)", + installed_version, + _format_versions(best_candidate_result.iter_applicable()), + ) + raise BestVersionAlreadyInstalled + + logger.debug( + "Using version %s (newest of versions: %s)", + best_candidate.version, + _format_versions(best_candidate_result.iter_applicable()), + ) + return best_candidate + + +def _find_name_version_sep(fragment: str, canonical_name: str) -> int: + """Find the separator's index based on the package's canonical name. + + :param fragment: A + filename "fragment" (stem) or + egg fragment. + :param canonical_name: The package's canonical name. + + This function is needed since the canonicalized name does not necessarily + have the same length as the egg info's name part. An example:: + + >>> fragment = 'foo__bar-1.0' + >>> canonical_name = 'foo-bar' + >>> _find_name_version_sep(fragment, canonical_name) + 8 + """ + # Project name and version must be separated by one single dash. Find all + # occurrences of dashes; if the string in front of it matches the canonical + # name, this is the one separating the name and version parts. 
+ for i, c in enumerate(fragment): + if c != "-": + continue + if canonicalize_name(fragment[:i]) == canonical_name: + return i + raise ValueError(f"{fragment} does not match {canonical_name}") + + +def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]: + """Parse the version string from a + filename + "fragment" (stem) or egg fragment. + + :param fragment: The string to parse. E.g. foo-2.1 + :param canonical_name: The canonicalized name of the package this + belongs to. + """ + try: + version_start = _find_name_version_sep(fragment, canonical_name) + 1 + except ValueError: + return None + version = fragment[version_start:] + if not version: + return None + return version diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/index/sources.py b/.venv/lib/python3.9/site-packages/pip/_internal/index/sources.py new file mode 100644 index 0000000..eec3f12 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/index/sources.py @@ -0,0 +1,224 @@ +import logging +import mimetypes +import os +import pathlib +from typing import Callable, Iterable, Optional, Tuple + +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.link import Link +from pip._internal.utils.urls import path_to_url, url_to_path +from pip._internal.vcs import is_url + +logger = logging.getLogger(__name__) + +FoundCandidates = Iterable[InstallationCandidate] +FoundLinks = Iterable[Link] +CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]] +PageValidator = Callable[[Link], bool] + + +class LinkSource: + @property + def link(self) -> Optional[Link]: + """Returns the underlying link, if there's one.""" + raise NotImplementedError() + + def page_candidates(self) -> FoundCandidates: + """Candidates found by parsing an archive listing HTML file.""" + raise NotImplementedError() + + def file_links(self) -> FoundLinks: + """Links found by specifying archives directly.""" + raise NotImplementedError() + + +def _is_html_file(file_url: str) -> bool: + return mimetypes.guess_type(file_url, strict=False)[0] == "text/html" + + +class _FlatDirectorySource(LinkSource): + """Link source specified by ``--find-links=``. + + This looks the content of the directory, and returns: + + * ``page_candidates``: Links listed on each HTML file in the directory. + * ``file_candidates``: Archives in the directory. + """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + path: str, + ) -> None: + self._candidates_from_page = candidates_from_page + self._path = pathlib.Path(os.path.realpath(path)) + + @property + def link(self) -> Optional[Link]: + return None + + def page_candidates(self) -> FoundCandidates: + for path in self._path.iterdir(): + url = path_to_url(str(path)) + if not _is_html_file(url): + continue + yield from self._candidates_from_page(Link(url)) + + def file_links(self) -> FoundLinks: + for path in self._path.iterdir(): + url = path_to_url(str(path)) + if _is_html_file(url): + continue + yield Link(url) + + +class _LocalFileSource(LinkSource): + """``--find-links=`` or ``--[extra-]index-url=``. + + If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to + the option, it is converted to a URL first. This returns: + + * ``page_candidates``: Links listed on an HTML file. + * ``file_candidates``: The non-HTML file. 
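+
+    The split is driven by the ``_is_html_file()`` helper above; roughly
+    (file names are illustrative, and the non-HTML type is platform-dependent)::
+
+        >>> import mimetypes
+        >>> mimetypes.guess_type("file:///tmp/index.html", strict=False)[0]
+        'text/html'
+        >>> mimetypes.guess_type("file:///tmp/pkg-1.0.tar.gz", strict=False)[0]
+        'application/x-tar'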
+ """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + if not _is_html_file(self._link.url): + return + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + if _is_html_file(self._link.url): + return + yield self._link + + +class _RemoteFileSource(LinkSource): + """``--find-links=`` or ``--[extra-]index-url=``. + + This returns: + + * ``page_candidates``: Links listed on an HTML file. + * ``file_candidates``: The non-HTML file. + """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + page_validator: PageValidator, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._page_validator = page_validator + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + if not self._page_validator(self._link): + return + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + yield self._link + + +class _IndexDirectorySource(LinkSource): + """``--[extra-]index-url=``. + + This is treated like a remote URL; ``candidates_from_page`` contains logic + for this by appending ``index.html`` to the link. + """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + return () + + +def build_source( + location: str, + *, + candidates_from_page: CandidatesFromPage, + page_validator: PageValidator, + expand_dir: bool, + cache_link_parsing: bool, +) -> Tuple[Optional[str], Optional[LinkSource]]: + + path: Optional[str] = None + url: Optional[str] = None + if os.path.exists(location): # Is a local path. + url = path_to_url(location) + path = location + elif location.startswith("file:"): # A file: URL. + url = location + path = url_to_path(location) + elif is_url(location): + url = location + + if url is None: + msg = ( + "Location '%s' is ignored: " + "it is either a non-existing path or lacks a specific scheme." 
+ ) + logger.warning(msg, location) + return (None, None) + + if path is None: + source: LinkSource = _RemoteFileSource( + candidates_from_page=candidates_from_page, + page_validator=page_validator, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + + if os.path.isdir(path): + if expand_dir: + source = _FlatDirectorySource( + candidates_from_page=candidates_from_page, + path=path, + ) + else: + source = _IndexDirectorySource( + candidates_from_page=candidates_from_page, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + elif os.path.isfile(path): + source = _LocalFileSource( + candidates_from_page=candidates_from_page, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + logger.warning( + "Location '%s' is ignored: it is neither a file nor a directory.", + location, + ) + return (url, None) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/locations/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__init__.py new file mode 100644 index 0000000..dba182d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__init__.py @@ -0,0 +1,446 @@ +import functools +import logging +import os +import pathlib +import sys +import sysconfig +from typing import Any, Dict, Iterator, List, Optional, Tuple + +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.virtualenv import running_under_virtualenv + +from . import _distutils, _sysconfig +from .base import ( + USER_CACHE_DIR, + get_major_minor_version, + get_src_prefix, + is_osx_framework, + site_packages, + user_site, +) + +__all__ = [ + "USER_CACHE_DIR", + "get_bin_prefix", + "get_bin_user", + "get_major_minor_version", + "get_platlib", + "get_prefixed_libs", + "get_purelib", + "get_scheme", + "get_src_prefix", + "site_packages", + "user_site", +] + + +logger = logging.getLogger(__name__) + +if os.environ.get("_PIP_LOCATIONS_NO_WARN_ON_MISMATCH"): + _MISMATCH_LEVEL = logging.DEBUG +else: + _MISMATCH_LEVEL = logging.WARNING + +_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib") + +_USE_SYSCONFIG = sys.version_info >= (3, 10) + + +def _looks_like_bpo_44860() -> bool: + """The resolution to bpo-44860 will change this incorrect platlib. + + See . + """ + from distutils.command.install import INSTALL_SCHEMES # type: ignore + + try: + unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"] + except KeyError: + return False + return unix_user_platlib == "$usersite" + + +def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool: + platlib = scheme["platlib"] + if "/$platlibdir/" in platlib and hasattr(sys, "platlibdir"): + platlib = platlib.replace("/$platlibdir/", f"/{sys.platlibdir}/") + if "/lib64/" not in platlib: + return False + unpatched = platlib.replace("/lib64/", "/lib/") + return unpatched.replace("$platbase/", "$base/") == scheme["purelib"] + + +@functools.lru_cache(maxsize=None) +def _looks_like_red_hat_lib() -> bool: + """Red Hat patches platlib in unix_prefix and unix_home, but not purelib. + + This is the only way I can see to tell a Red Hat-patched Python. 
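+
+    On such an interpreter the patched distutils schemes look roughly like the
+    following (the exact values are illustrative and vary between releases)::
+
+        INSTALL_SCHEMES["unix_prefix"]["platlib"]
+        # e.g. '$platbase/lib64/python$py_version_short/site-packages'
+        INSTALL_SCHEMES["unix_prefix"]["purelib"]
+        # e.g. '$base/lib/python$py_version_short/site-packages'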
+ """ + from distutils.command.install import INSTALL_SCHEMES # type: ignore + + return all( + k in INSTALL_SCHEMES + and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k]) + for k in ("unix_prefix", "unix_home") + ) + + +@functools.lru_cache(maxsize=None) +def _looks_like_debian_scheme() -> bool: + """Debian adds two additional schemes.""" + from distutils.command.install import INSTALL_SCHEMES # type: ignore + + return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES + + +@functools.lru_cache(maxsize=None) +def _looks_like_red_hat_scheme() -> bool: + """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``. + + Red Hat's ``00251-change-user-install-location.patch`` changes the install + command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is + (fortunately?) done quite unconditionally, so we create a default command + object without any configuration to detect this. + """ + from distutils.command.install import install + from distutils.dist import Distribution + + cmd: Any = install(Distribution()) + cmd.finalize_options() + return ( + cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local" + and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local" + ) + + +@functools.lru_cache(maxsize=None) +def _looks_like_msys2_mingw_scheme() -> bool: + """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. + + However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is + likely going to be included in their 3.10 release, so we ignore the warning. + See msys2/MINGW-packages#9319. + + MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, + and is missing the final ``"site-packages"``. + """ + paths = sysconfig.get_paths("nt", expand=False) + return all( + "Lib" not in p and "lib" in p and not p.endswith("site-packages") + for p in (paths[key] for key in ("platlib", "purelib")) + ) + + +def _fix_abiflags(parts: Tuple[str]) -> Iterator[str]: + ldversion = sysconfig.get_config_var("LDVERSION") + abiflags: str = getattr(sys, "abiflags", None) + + # LDVERSION does not end with sys.abiflags. Just return the path unchanged. + if not ldversion or not abiflags or not ldversion.endswith(abiflags): + yield from parts + return + + # Strip sys.abiflags from LDVERSION-based path components. + for part in parts: + if part.endswith(ldversion): + part = part[: (0 - len(abiflags))] + yield part + + +@functools.lru_cache(maxsize=None) +def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None: + issue_url = "https://github.com/pypa/pip/issues/10151" + message = ( + "Value for %s does not match. 
Please report this to <%s>" + "\ndistutils: %s" + "\nsysconfig: %s" + ) + logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new) + + +def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool: + if old == new: + return False + _warn_mismatched(old, new, key=key) + return True + + +@functools.lru_cache(maxsize=None) +def _log_context( + *, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + prefix: Optional[str] = None, +) -> None: + parts = [ + "Additional context:", + "user = %r", + "home = %r", + "root = %r", + "prefix = %r", + ] + + logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix) + + +def get_scheme( + dist_name: str, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> Scheme: + new = _sysconfig.get_scheme( + dist_name, + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + if _USE_SYSCONFIG: + return new + + old = _distutils.get_scheme( + dist_name, + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + + warning_contexts = [] + for k in SCHEME_KEYS: + old_v = pathlib.Path(getattr(old, k)) + new_v = pathlib.Path(getattr(new, k)) + + if old_v == new_v: + continue + + # distutils incorrectly put PyPy packages under ``site-packages/python`` + # in the ``posix_home`` scheme, but PyPy devs said they expect the + # directory name to be ``pypy`` instead. So we treat this as a bug fix + # and not warn about it. See bpo-43307 and python/cpython#24628. + skip_pypy_special_case = ( + sys.implementation.name == "pypy" + and home is not None + and k in ("platlib", "purelib") + and old_v.parent == new_v.parent + and old_v.name.startswith("python") + and new_v.name.startswith("pypy") + ) + if skip_pypy_special_case: + continue + + # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in + # the ``include`` value, but distutils's ``headers`` does. We'll let + # CPython decide whether this is a bug or feature. See bpo-43948. + skip_osx_framework_user_special_case = ( + user + and is_osx_framework() + and k == "headers" + and old_v.parent.parent == new_v.parent + and old_v.parent.name.startswith("python") + ) + if skip_osx_framework_user_special_case: + continue + + # On Red Hat and derived Linux distributions, distutils is patched to + # use "lib64" instead of "lib" for platlib. + if k == "platlib" and _looks_like_red_hat_lib(): + continue + + # On Python 3.9+, sysconfig's posix_user scheme sets platlib against + # sys.platlibdir, but distutils's unix_user incorrectly coninutes + # using the same $usersite for both platlib and purelib. This creates a + # mismatch when sys.platlibdir is not "lib". + skip_bpo_44860 = ( + user + and k == "platlib" + and not WINDOWS + and sys.version_info >= (3, 9) + and _PLATLIBDIR != "lib" + and _looks_like_bpo_44860() + ) + if skip_bpo_44860: + continue + + # Both Debian and Red Hat patch Python to place the system site under + # /usr/local instead of /usr. Debian also places lib in dist-packages + # instead of site-packages, but the /usr/local check should cover it. 
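+        # Roughly: the old (distutils) value starts with /usr/local/... while
+        # the new (sysconfig) value starts with /usr/... without a "local"
+        # component, which is what the parts-based checks below test for.
+        # (Illustrative reading; the concrete paths differ per distribution.)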
+ skip_linux_system_special_case = ( + not (user or home or prefix or running_under_virtualenv()) + and old_v.parts[1:3] == ("usr", "local") + and len(new_v.parts) > 1 + and new_v.parts[1] == "usr" + and (len(new_v.parts) < 3 or new_v.parts[2] != "local") + and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme()) + ) + if skip_linux_system_special_case: + continue + + # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in + # the "pythonX.Y" part of the path, but distutils does. + skip_sysconfig_abiflag_bug = ( + sys.version_info < (3, 8) + and not WINDOWS + and k in ("headers", "platlib", "purelib") + and tuple(_fix_abiflags(old_v.parts)) == new_v.parts + ) + if skip_sysconfig_abiflag_bug: + continue + + # MSYS2 MINGW's sysconfig patch does not include the "site-packages" + # part of the path. This is incorrect and will be fixed in MSYS. + skip_msys2_mingw_bug = ( + WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme() + ) + if skip_msys2_mingw_bug: + continue + + # CPython's POSIX install script invokes pip (via ensurepip) against the + # interpreter located in the source tree, not the install site. This + # triggers special logic in sysconfig that's not present in distutils. + # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194 + skip_cpython_build = ( + sysconfig.is_python_build(check_home=True) + and not WINDOWS + and k in ("headers", "include", "platinclude") + ) + if skip_cpython_build: + continue + + warning_contexts.append((old_v, new_v, f"scheme.{k}")) + + if not warning_contexts: + return old + + # Check if this path mismatch is caused by distutils config files. Those + # files will no longer work once we switch to sysconfig, so this raises a + # deprecation message for them. + default_old = _distutils.distutils_scheme( + dist_name, + user, + home, + root, + isolated, + prefix, + ignore_config_files=True, + ) + if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS): + deprecated( + reason=( + "Configuring installation scheme with distutils config files " + "is deprecated and will no longer work in the near future. If you " + "are using a Homebrew or Linuxbrew Python, please see discussion " + "at https://github.com/Homebrew/homebrew-core/issues/76621" + ), + replacement=None, + gone_in=None, + ) + return old + + # Post warnings about this mismatch so user can report them back. + for old_v, new_v, key in warning_contexts: + _warn_mismatched(old_v, new_v, key=key) + _log_context(user=user, home=home, root=root, prefix=prefix) + + return old + + +def get_bin_prefix() -> str: + new = _sysconfig.get_bin_prefix() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_bin_prefix() + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"): + _log_context() + return old + + +def get_bin_user() -> str: + return _sysconfig.get_scheme("", user=True).scripts + + +def _looks_like_deb_system_dist_packages(value: str) -> bool: + """Check if the value is Debian's APT-controlled dist-packages. + + Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the + default package path controlled by APT, but does not patch ``sysconfig`` to + do the same. This is similar to the bug worked around in ``get_scheme()``, + but here the default is ``deb_system`` instead of ``unix_local``. Ultimately + we can't do anything about this Debian bug, and this detection allows us to + skip the warning when needed. 
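+
+    A sketch of the intended behaviour (only meaningful on a Debian-patched
+    interpreter, where the extra schemes exist)::
+
+        _looks_like_deb_system_dist_packages("/usr/lib/python3/dist-packages")
+        # -> True on a Debian/Ubuntu system Python, False elsewhere
+        _looks_like_deb_system_dist_packages("/usr/lib/python3.9/site-packages")
+        # -> False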
+ """ + if not _looks_like_debian_scheme(): + return False + if value == "/usr/lib/python3/dist-packages": + return True + return False + + +def get_purelib() -> str: + """Return the default pure-Python lib location.""" + new = _sysconfig.get_purelib() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_purelib() + if _looks_like_deb_system_dist_packages(old): + return old + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"): + _log_context() + return old + + +def get_platlib() -> str: + """Return the default platform-shared lib location.""" + new = _sysconfig.get_platlib() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_platlib() + if _looks_like_deb_system_dist_packages(old): + return old + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): + _log_context() + return old + + +def _deduplicated(v1: str, v2: str) -> List[str]: + """Deduplicate values from a list.""" + if v1 == v2: + return [v1] + return [v1, v2] + + +def get_prefixed_libs(prefix: str) -> List[str]: + """Return the lib locations under ``prefix``.""" + new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix) + if _USE_SYSCONFIG: + return _deduplicated(new_pure, new_plat) + + old_pure, old_plat = _distutils.get_prefixed_libs(prefix) + + warned = [ + _warn_if_mismatch( + pathlib.Path(old_pure), + pathlib.Path(new_pure), + key="prefixed-purelib", + ), + _warn_if_mismatch( + pathlib.Path(old_plat), + pathlib.Path(new_plat), + key="prefixed-platlib", + ), + ] + if any(warned): + _log_context(prefix=prefix) + + return _deduplicated(old_pure, old_plat) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..def6e63 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc new file mode 100644 index 0000000..02753f6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc new file mode 100644 index 0000000..958f17d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..69ae971 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/locations/_distutils.py b/.venv/lib/python3.9/site-packages/pip/_internal/locations/_distutils.py new file mode 100644 index 0000000..2ec79e6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/locations/_distutils.py @@ -0,0 +1,169 @@ +"""Locations where we look for configs, install stuff, etc""" + +# The following comment should be removed at some point in the 
future. +# mypy: strict-optional=False + +import logging +import os +import sys +from distutils.cmd import Command as DistutilsCommand +from distutils.command.install import SCHEME_KEYS +from distutils.command.install import install as distutils_install_command +from distutils.sysconfig import get_python_lib +from typing import Dict, List, Optional, Tuple, Union, cast + +from pip._internal.models.scheme import Scheme +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import get_major_minor_version + +logger = logging.getLogger(__name__) + + +def distutils_scheme( + dist_name: str, + user: bool = False, + home: str = None, + root: str = None, + isolated: bool = False, + prefix: str = None, + *, + ignore_config_files: bool = False, +) -> Dict[str, str]: + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name} + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] + + d = Distribution(dist_args) + if not ignore_config_files: + try: + d.parse_config_files() + except UnicodeDecodeError: + # Typeshed does not include find_config_files() for some reason. + paths = d.find_config_files() # type: ignore + logger.warning( + "Ignore distutils configs in %s due to encoding errors.", + ", ".join(os.path.basename(p) for p in paths), + ) + obj: Optional[DistutilsCommand] = None + obj = d.get_command_obj("install", create=True) + assert obj is not None + i = cast(distutils_install_command, obj) + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + assert not (user and prefix), f"user={user} prefix={prefix}" + assert not (home and prefix), f"home={home} prefix={prefix}" + i.user = user or i.user + if user or home: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + + scheme = {} + for key in SCHEME_KEYS: + scheme[key] = getattr(i, "install_" + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if "install_lib" in d.get_option_dict("install"): + scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + + if running_under_virtualenv(): + if home: + prefix = home + elif user: + prefix = i.install_userbase # type: ignore + else: + prefix = i.prefix + scheme["headers"] = os.path.join( + prefix, + "include", + "site", + f"python{get_major_minor_version()}", + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join(root, path_no_drive[1:]) + + return scheme + + +def get_scheme( + dist_name: str, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> Scheme: + """ + Get the "scheme" corresponding to the input parameters. 
The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) + + +def get_bin_prefix() -> str: + # XXX: In old virtualenv versions, sys.prefix can contain '..' components, + # so we need to call normpath to eliminate them. + prefix = os.path.normpath(sys.prefix) + if WINDOWS: + bin_py = os.path.join(prefix, "Scripts") + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(prefix, "bin") + return bin_py + # Forcing to use /usr/local/bin for standard macOS framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return os.path.join(prefix, "bin") + + +def get_purelib() -> str: + return get_python_lib(plat_specific=False) + + +def get_platlib() -> str: + return get_python_lib(plat_specific=True) + + +def get_prefixed_libs(prefix: str) -> Tuple[str, str]: + return ( + get_python_lib(plat_specific=False, prefix=prefix), + get_python_lib(plat_specific=True, prefix=prefix), + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/locations/_sysconfig.py b/.venv/lib/python3.9/site-packages/pip/_internal/locations/_sysconfig.py new file mode 100644 index 0000000..5e141aa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/locations/_sysconfig.py @@ -0,0 +1,219 @@ +import distutils.util # FIXME: For change_root. +import logging +import os +import sys +import sysconfig +import typing + +from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import get_major_minor_version, is_osx_framework + +logger = logging.getLogger(__name__) + + +# Notes on _infer_* functions. +# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no +# way to ask things like "what is the '_prefix' scheme on this platform". These +# functions try to answer that with some heuristics while accounting for ad-hoc +# platforms not covered by CPython's default sysconfig implementation. If the +# ad-hoc implementation does not fully implement sysconfig, we'll fall back to +# a POSIX scheme. + +_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) + +_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None) + + +def _should_use_osx_framework_prefix() -> bool: + """Check for Apple's ``osx_framework_library`` scheme. + + Python distributed by Apple's Command Line Tools has this special scheme + that's used when: + + * This is a framework build. 
+ * We are installing into the system prefix. + + This does not account for ``pip install --prefix`` (also means we're not + installing to the system prefix), which should use ``posix_prefix``, but + logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But + since ``prefix`` is not available for ``sysconfig.get_default_scheme()``, + which is the stdlib replacement for ``_infer_prefix()``, presumably Apple + wouldn't be able to magically switch between ``osx_framework_library`` and + ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library`` + means its behavior is consistent whether we use the stdlib implementation + or our own, and we deal with this special case in ``get_scheme()`` instead. + """ + return ( + "osx_framework_library" in _AVAILABLE_SCHEMES + and not running_under_virtualenv() + and is_osx_framework() + ) + + +def _infer_prefix() -> str: + """Try to find a prefix scheme for the current platform. + + This tries: + + * A special ``osx_framework_library`` for Python distributed by Apple's + Command Line Tools, when not running in a virtual environment. + * Implementation + OS, used by PyPy on Windows (``pypy_nt``). + * Implementation without OS, used by PyPy on POSIX (``pypy``). + * OS + "prefix", used by CPython on POSIX (``posix_prefix``). + * Just the OS name, used by CPython on Windows (``nt``). + + If none of the above works, fall back to ``posix_prefix``. + """ + if _PREFERRED_SCHEME_API: + return _PREFERRED_SCHEME_API("prefix") + if _should_use_osx_framework_prefix(): + return "osx_framework_library" + implementation_suffixed = f"{sys.implementation.name}_{os.name}" + if implementation_suffixed in _AVAILABLE_SCHEMES: + return implementation_suffixed + if sys.implementation.name in _AVAILABLE_SCHEMES: + return sys.implementation.name + suffixed = f"{os.name}_prefix" + if suffixed in _AVAILABLE_SCHEMES: + return suffixed + if os.name in _AVAILABLE_SCHEMES: # On Windows, prefx is just called "nt". + return os.name + return "posix_prefix" + + +def _infer_user() -> str: + """Try to find a user scheme for the current platform.""" + if _PREFERRED_SCHEME_API: + return _PREFERRED_SCHEME_API("user") + if is_osx_framework() and not running_under_virtualenv(): + suffixed = "osx_framework_user" + else: + suffixed = f"{os.name}_user" + if suffixed in _AVAILABLE_SCHEMES: + return suffixed + if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable. + raise UserInstallationInvalid() + return "posix_user" + + +def _infer_home() -> str: + """Try to find a home for the current platform.""" + if _PREFERRED_SCHEME_API: + return _PREFERRED_SCHEME_API("home") + suffixed = f"{os.name}_home" + if suffixed in _AVAILABLE_SCHEMES: + return suffixed + return "posix_home" + + +# Update these keys if the user sets a custom home. +_HOME_KEYS = [ + "installed_base", + "base", + "installed_platbase", + "platbase", + "prefix", + "exec_prefix", +] +if sysconfig.get_config_var("userbase") is not None: + _HOME_KEYS.append("userbase") + + +def get_scheme( + dist_name: str, + user: bool = False, + home: typing.Optional[str] = None, + root: typing.Optional[str] = None, + isolated: bool = False, + prefix: typing.Optional[str] = None, +) -> Scheme: + """ + Get the "scheme" corresponding to the input parameters. 
+ + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme + :param root: root under which other directories are re-based + :param isolated: ignored, but kept for distutils compatibility (where + this controls whether the user-site pydistutils.cfg is honored) + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + if user and prefix: + raise InvalidSchemeCombination("--user", "--prefix") + if home and prefix: + raise InvalidSchemeCombination("--home", "--prefix") + + if home is not None: + scheme_name = _infer_home() + elif user: + scheme_name = _infer_user() + else: + scheme_name = _infer_prefix() + + # Special case: When installing into a custom prefix, use posix_prefix + # instead of osx_framework_library. See _should_use_osx_framework_prefix() + # docstring for details. + if prefix is not None and scheme_name == "osx_framework_library": + scheme_name = "posix_prefix" + + if home is not None: + variables = {k: home for k in _HOME_KEYS} + elif prefix is not None: + variables = {k: prefix for k in _HOME_KEYS} + else: + variables = {} + + paths = sysconfig.get_paths(scheme=scheme_name, vars=variables) + + # Logic here is very arbitrary, we're doing it for compatibility, don't ask. + # 1. Pip historically uses a special header path in virtual environments. + # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We + # only do the same when not running in a virtual environment because + # pip's historical header path logic (see point 1) did not do this. + if running_under_virtualenv(): + if user: + base = variables.get("userbase", sys.prefix) + else: + base = variables.get("base", sys.prefix) + python_xy = f"python{get_major_minor_version()}" + paths["include"] = os.path.join(base, "include", "site", python_xy) + elif not dist_name: + dist_name = "UNKNOWN" + + scheme = Scheme( + platlib=paths["platlib"], + purelib=paths["purelib"], + headers=os.path.join(paths["include"], dist_name), + scripts=paths["scripts"], + data=paths["data"], + ) + if root is not None: + for key in SCHEME_KEYS: + value = distutils.util.change_root(root, getattr(scheme, key)) + setattr(scheme, key, value) + return scheme + + +def get_bin_prefix() -> str: + # Forcing to use /usr/local/bin for standard macOS framework installs. 
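+    # (i.e. when sys.prefix is under /System/Library/, as checked below; a
+    # virtualenv or a non-framework install falls through to sysconfig's own
+    # scripts path)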
+ if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return sysconfig.get_paths()["scripts"] + + +def get_purelib() -> str: + return sysconfig.get_paths()["purelib"] + + +def get_platlib() -> str: + return sysconfig.get_paths()["platlib"] + + +def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]: + paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix}) + return (paths["purelib"], paths["platlib"]) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/locations/base.py b/.venv/lib/python3.9/site-packages/pip/_internal/locations/base.py new file mode 100644 index 0000000..86dad4a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/locations/base.py @@ -0,0 +1,52 @@ +import functools +import os +import site +import sys +import sysconfig +import typing + +from pip._internal.utils import appdirs +from pip._internal.utils.virtualenv import running_under_virtualenv + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + +# FIXME doesn't account for venv linked to global site-packages +site_packages: typing.Optional[str] = sysconfig.get_path("purelib") + + +def get_major_minor_version() -> str: + """ + Return the major-minor version of the current Python as a string, e.g. + "3.7" or "3.10". + """ + return "{}.{}".format(*sys.version_info) + + +def get_src_prefix() -> str: + if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, "src") + else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), "src") + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit("The folder you are executing pip from can no longer be found.") + + # under macOS + virtualenv sys.prefix is not properly resolved + # it is something like /path/to/python/bin/.. + return os.path.abspath(src_prefix) + + +try: + # Use getusersitepackages if this is present, as it ensures that the + # value is initialised properly. + user_site: typing.Optional[str] = site.getusersitepackages() +except AttributeError: + user_site = site.USER_SITE + + +@functools.lru_cache(maxsize=None) +def is_osx_framework() -> bool: + return bool(sysconfig.get_config_var("PYTHONFRAMEWORK")) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/main.py b/.venv/lib/python3.9/site-packages/pip/_internal/main.py new file mode 100644 index 0000000..33c6d24 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/main.py @@ -0,0 +1,12 @@ +from typing import List, Optional + + +def main(args: Optional[List[str]] = None) -> int: + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
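+
+    An old console script entry point would invoke it roughly like this
+    (the argument list is illustrative)::
+
+        import sys
+
+        from pip._internal.main import main
+
+        sys.exit(main(["--version"]))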
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__init__.py new file mode 100644 index 0000000..f4f2a4f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__init__.py @@ -0,0 +1,51 @@ +from typing import List, Optional + +from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel + +__all__ = [ + "BaseDistribution", + "BaseEnvironment", + "FilesystemWheel", + "MemoryWheel", + "Wheel", + "get_default_environment", + "get_environment", + "get_wheel_distribution", +] + + +def get_default_environment() -> BaseEnvironment: + """Get the default representation for the current environment. + + This returns an Environment instance from the chosen backend. The default + Environment instance should be built from ``sys.path`` and may use caching + to share instance state accorss calls. + """ + from .pkg_resources import Environment + + return Environment.default() + + +def get_environment(paths: Optional[List[str]]) -> BaseEnvironment: + """Get a representation of the environment specified by ``paths``. + + This returns an Environment instance from the chosen backend based on the + given import paths. The backend must build a fresh instance representing + the state of installed distributions when this function is called. + """ + from .pkg_resources import Environment + + return Environment.from_paths(paths) + + +def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution: + """Get the representation of the specified wheel's distribution metadata. + + This returns a Distribution instance from the chosen backend based on + the given wheel's ``.dist-info`` directory. + + :param canonical_name: Normalized project name of the given wheel. 
+ """ + from .pkg_resources import Distribution + + return Distribution.from_wheel(wheel, canonical_name) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e5e00ba Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..20670d4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc new file mode 100644 index 0000000..dbd0a68 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/metadata/base.py b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/base.py new file mode 100644 index 0000000..4788360 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/base.py @@ -0,0 +1,330 @@ +import email.message +import json +import logging +import re +import zipfile +from typing import ( + IO, + TYPE_CHECKING, + Collection, + Container, + Iterable, + Iterator, + List, + Optional, + Union, +) + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet +from pip._vendor.packaging.utils import NormalizedName +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.models.direct_url import ( + DIRECT_URL_METADATA_NAME, + DirectUrl, + DirectUrlValidationError, +) +from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. +from pip._internal.utils.egg_link import egg_link_path_from_sys_path +from pip._internal.utils.urls import url_to_path + +if TYPE_CHECKING: + from typing import Protocol +else: + Protocol = object + +DistributionVersion = Union[LegacyVersion, Version] + +logger = logging.getLogger(__name__) + + +class BaseEntryPoint(Protocol): + @property + def name(self) -> str: + raise NotImplementedError() + + @property + def value(self) -> str: + raise NotImplementedError() + + @property + def group(self) -> str: + raise NotImplementedError() + + +class BaseDistribution(Protocol): + def __repr__(self) -> str: + return f"{self.raw_name} {self.version} ({self.location})" + + def __str__(self) -> str: + return f"{self.raw_name} {self.version}" + + @property + def location(self) -> Optional[str]: + """Where the distribution is loaded from. + + A string value is not necessarily a filesystem path, since distributions + can be loaded from other sources, e.g. arbitrary zip archives. ``None`` + means the distribution is created in-memory. + + Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If + this is a symbolic link, we want to preserve the relative path between + it and files in the distribution. 
+ """ + raise NotImplementedError() + + @property + def editable_project_location(self) -> Optional[str]: + """The project location for editable distributions. + + This is the directory where pyproject.toml or setup.py is located. + None if the distribution is not installed in editable mode. + """ + # TODO: this property is relatively costly to compute, memoize it ? + direct_url = self.direct_url + if direct_url: + if direct_url.is_local_editable(): + return url_to_path(direct_url.url) + else: + # Search for an .egg-link file by walking sys.path, as it was + # done before by dist_is_editable(). + egg_link_path = egg_link_path_from_sys_path(self.raw_name) + if egg_link_path: + # TODO: get project location from second line of egg_link file + # (https://github.com/pypa/pip/issues/10243) + return self.location + return None + + @property + def info_directory(self) -> Optional[str]: + """Location of the .[egg|dist]-info directory. + + Similarly to ``location``, a string value is not necessarily a + filesystem path. ``None`` means the distribution is created in-memory. + + For a modern .dist-info installation on disk, this should be something + like ``{location}/{raw_name}-{version}.dist-info``. + + Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If + this is a symbolic link, we want to preserve the relative path between + it and other files in the distribution. + """ + raise NotImplementedError() + + @property + def canonical_name(self) -> NormalizedName: + raise NotImplementedError() + + @property + def version(self) -> DistributionVersion: + raise NotImplementedError() + + @property + def direct_url(self) -> Optional[DirectUrl]: + """Obtain a DirectUrl from this distribution. + + Returns None if the distribution has no `direct_url.json` metadata, + or if `direct_url.json` is invalid. + """ + try: + content = self.read_text(DIRECT_URL_METADATA_NAME) + except FileNotFoundError: + return None + try: + return DirectUrl.from_json(content) + except ( + UnicodeDecodeError, + json.JSONDecodeError, + DirectUrlValidationError, + ) as e: + logger.warning( + "Error parsing %s for %s: %s", + DIRECT_URL_METADATA_NAME, + self.canonical_name, + e, + ) + return None + + @property + def installer(self) -> str: + raise NotImplementedError() + + @property + def editable(self) -> bool: + return bool(self.editable_project_location) + + @property + def local(self) -> bool: + raise NotImplementedError() + + @property + def in_usersite(self) -> bool: + raise NotImplementedError() + + @property + def in_site_packages(self) -> bool: + raise NotImplementedError() + + def read_text(self, name: str) -> str: + """Read a file in the .dist-info (or .egg-info) directory. + + Should raise ``FileNotFoundError`` if ``name`` does not exist in the + metadata directory. + """ + raise NotImplementedError() + + def iter_entry_points(self) -> Iterable[BaseEntryPoint]: + raise NotImplementedError() + + @property + def metadata(self) -> email.message.Message: + """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.""" + raise NotImplementedError() + + @property + def metadata_version(self) -> Optional[str]: + """Value of "Metadata-Version:" in distribution metadata, if available.""" + return self.metadata.get("Metadata-Version") + + @property + def raw_name(self) -> str: + """Value of "Name:" in distribution metadata.""" + # The metadata should NEVER be missing the Name: key, but if it somehow + # does, fall back to the known canonical name. 
+ return self.metadata.get("Name", self.canonical_name) + + @property + def requires_python(self) -> SpecifierSet: + """Value of "Requires-Python:" in distribution metadata. + + If the key does not exist or contains an invalid value, an empty + SpecifierSet should be returned. + """ + value = self.metadata.get("Requires-Python") + if value is None: + return SpecifierSet() + try: + # Convert to str to satisfy the type checker; this can be a Header object. + spec = SpecifierSet(str(value)) + except InvalidSpecifier as e: + message = "Package %r has an invalid Requires-Python: %s" + logger.warning(message, self.raw_name, e) + return SpecifierSet() + return spec + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + """Dependencies of this distribution. + + For modern .dist-info distributions, this is the collection of + "Requires-Dist:" entries in distribution metadata. + """ + raise NotImplementedError() + + def iter_provided_extras(self) -> Iterable[str]: + """Extras provided by this distribution. + + For modern .dist-info distributions, this is the collection of + "Provides-Extra:" entries in distribution metadata. + """ + raise NotImplementedError() + + +class BaseEnvironment: + """An environment containing distributions to introspect.""" + + @classmethod + def default(cls) -> "BaseEnvironment": + raise NotImplementedError() + + @classmethod + def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment": + raise NotImplementedError() + + def get_distribution(self, name: str) -> Optional["BaseDistribution"]: + """Given a requirement name, return the installed distributions.""" + raise NotImplementedError() + + def _iter_distributions(self) -> Iterator["BaseDistribution"]: + """Iterate through installed distributions. + + This function should be implemented by subclass, but never called + directly. Use the public ``iter_distribution()`` instead, which + implements additional logic to make sure the distributions are valid. + """ + raise NotImplementedError() + + def iter_distributions(self) -> Iterator["BaseDistribution"]: + """Iterate through installed distributions.""" + for dist in self._iter_distributions(): + # Make sure the distribution actually comes from a valid Python + # packaging distribution. Pip's AdjacentTempDirectory leaves folders + # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The + # valid project name pattern is taken from PEP 508. + project_name_valid = re.match( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", + dist.canonical_name, + flags=re.IGNORECASE, + ) + if not project_name_valid: + logger.warning( + "Ignoring invalid distribution %s (%s)", + dist.canonical_name, + dist.location, + ) + continue + yield dist + + def iter_installed_distributions( + self, + local_only: bool = True, + skip: Container[str] = stdlib_pkgs, + include_editables: bool = True, + editables_only: bool = False, + user_only: bool = False, + ) -> Iterator[BaseDistribution]: + """Return a list of installed distributions. + + :param local_only: If True (default), only return installations + local to the current virtualenv, if in a virtualenv. + :param skip: An iterable of canonicalized project names to ignore; + defaults to ``stdlib_pkgs``. + :param include_editables: If False, don't report editables. + :param editables_only: If True, only report editables. + :param user_only: If True, only report installations in the user + site directory. 
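+
+        A minimal sketch using the default environment backend::
+
+            from pip._internal.metadata import get_default_environment
+
+            env = get_default_environment()
+            names = sorted(
+                dist.canonical_name
+                for dist in env.iter_installed_distributions(local_only=False)
+            )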
+ """ + it = self.iter_distributions() + if local_only: + it = (d for d in it if d.local) + if not include_editables: + it = (d for d in it if not d.editable) + if editables_only: + it = (d for d in it if d.editable) + if user_only: + it = (d for d in it if d.in_usersite) + return (d for d in it if d.canonical_name not in skip) + + +class Wheel(Protocol): + location: str + + def as_zipfile(self) -> zipfile.ZipFile: + raise NotImplementedError() + + +class FilesystemWheel(Wheel): + def __init__(self, location: str) -> None: + self.location = location + + def as_zipfile(self) -> zipfile.ZipFile: + return zipfile.ZipFile(self.location, allowZip64=True) + + +class MemoryWheel(Wheel): + def __init__(self, location: str, stream: IO[bytes]) -> None: + self.location = location + self.stream = stream + + def as_zipfile(self) -> zipfile.ZipFile: + return zipfile.ZipFile(self.stream, allowZip64=True) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/metadata/pkg_resources.py b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/pkg_resources.py new file mode 100644 index 0000000..e8a8a38 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/metadata/pkg_resources.py @@ -0,0 +1,146 @@ +import email.message +import logging +from typing import Collection, Iterable, Iterator, List, NamedTuple, Optional + +from pip._vendor import pkg_resources +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.utils import misc # TODO: Move definition here. +from pip._internal.utils.packaging import get_installer, get_metadata +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + +from .base import ( + BaseDistribution, + BaseEntryPoint, + BaseEnvironment, + DistributionVersion, + Wheel, +) + +logger = logging.getLogger(__name__) + + +class EntryPoint(NamedTuple): + name: str + value: str + group: str + + +class Distribution(BaseDistribution): + def __init__(self, dist: pkg_resources.Distribution) -> None: + self._dist = dist + + @classmethod + def from_wheel(cls, wheel: Wheel, name: str) -> "Distribution": + with wheel.as_zipfile() as zf: + dist = pkg_resources_distribution_for_wheel(zf, name, wheel.location) + return cls(dist) + + @property + def location(self) -> Optional[str]: + return self._dist.location + + @property + def info_directory(self) -> Optional[str]: + return self._dist.egg_info + + @property + def canonical_name(self) -> NormalizedName: + return canonicalize_name(self._dist.project_name) + + @property + def version(self) -> DistributionVersion: + return parse_version(self._dist.version) + + @property + def installer(self) -> str: + return get_installer(self._dist) + + @property + def local(self) -> bool: + return misc.dist_is_local(self._dist) + + @property + def in_usersite(self) -> bool: + return misc.dist_in_usersite(self._dist) + + @property + def in_site_packages(self) -> bool: + return misc.dist_in_site_packages(self._dist) + + def read_text(self, name: str) -> str: + if not self._dist.has_metadata(name): + raise FileNotFoundError(name) + return self._dist.get_metadata(name) + + def iter_entry_points(self) -> Iterable[BaseEntryPoint]: + for group, entries in self._dist.get_entry_map().items(): + for name, entry_point in entries.items(): + name, _, value = str(entry_point).partition("=") + yield EntryPoint(name=name.strip(), value=value.strip(), group=group) + + @property + def 
metadata(self) -> email.message.Message: + return get_metadata(self._dist) + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + if extras: # pkg_resources raises on invalid extras, so we sanitize. + extras = frozenset(extras).intersection(self._dist.extras) + return self._dist.requires(extras) + + def iter_provided_extras(self) -> Iterable[str]: + return self._dist.extras + + +class Environment(BaseEnvironment): + def __init__(self, ws: pkg_resources.WorkingSet) -> None: + self._ws = ws + + @classmethod + def default(cls) -> BaseEnvironment: + return cls(pkg_resources.working_set) + + @classmethod + def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment: + return cls(pkg_resources.WorkingSet(paths)) + + def _search_distribution(self, name: str) -> Optional[BaseDistribution]: + """Find a distribution matching the ``name`` in the environment. + + This searches from *all* distributions available in the environment, to + match the behavior of ``pkg_resources.get_distribution()``. + """ + canonical_name = canonicalize_name(name) + for dist in self.iter_distributions(): + if dist.canonical_name == canonical_name: + return dist + return None + + def get_distribution(self, name: str) -> Optional[BaseDistribution]: + + # Search the distribution by looking through the working set. + dist = self._search_distribution(name) + if dist: + return dist + + # If distribution could not be found, call working_set.require to + # update the working set, and try to find the distribution again. + # This might happen for e.g. when you install a package twice, once + # using setup.py develop and again using setup.py install. Now when + # running pip uninstall twice, the package gets removed from the + # working set in the first uninstall, so we have to populate the + # working set again so that pip knows about it and the packages gets + # picked up and is successfully uninstalled the second time too. + try: + # We didn't pass in any version specifiers, so this can never + # raise pkg_resources.VersionConflict. + self._ws.require(name) + except pkg_resources.DistributionNotFound: + return None + return self._search_distribution(name) + + def _iter_distributions(self) -> Iterator[BaseDistribution]: + for dist in self._ws: + yield Distribution(dist) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/__init__.py new file mode 100644 index 0000000..7855226 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/__init__.py @@ -0,0 +1,2 @@ +"""A package that contains models that represent entities. 
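+Examples include ``InstallationCandidate``, ``DirectUrl``, ``Link``,
+``Scheme``, ``SearchScope``, ``SelectionPreferences``, ``TargetPython`` and
+``Wheel`` (see the sibling modules in this package).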
+""" diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..c238562 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc new file mode 100644 index 0000000..f8fdeb6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc new file mode 100644 index 0000000..c05c9e5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc new file mode 100644 index 0000000..58123bd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc new file mode 100644 index 0000000..697f414 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc new file mode 100644 index 0000000..8691cc5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc new file mode 100644 index 0000000..2ee10ae Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc new file mode 100644 index 0000000..cf94b86 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc new file mode 100644 index 0000000..5e69404 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc new file mode 100644 index 0000000..e577890 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc new file mode 100644 index 0000000..5d079dc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/candidate.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/candidate.py new file mode 100644 index 0000000..a4963ae --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/candidate.py @@ -0,0 +1,34 @@ +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.models.link import Link +from pip._internal.utils.models import KeyBasedCompareMixin + + +class InstallationCandidate(KeyBasedCompareMixin): + """Represents a potential "candidate" for installation.""" + + __slots__ = ["name", "version", "link"] + + def __init__(self, name: str, version: str, link: Link) -> None: + self.name = name + self.version = parse_version(version) + self.link = link + + super().__init__( + key=(self.name, self.version, self.link), + defining_class=InstallationCandidate, + ) + + def __repr__(self) -> str: + return "".format( + self.name, + self.version, + self.link, + ) + + def __str__(self) -> str: + return "{!r} candidate (version {} at {})".format( + self.name, + self.version, + self.link, + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/direct_url.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/direct_url.py new file mode 100644 index 0000000..92060d4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/direct_url.py @@ -0,0 +1,220 @@ +""" PEP 610 """ +import json +import re +import urllib.parse +from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union + +__all__ = [ + "DirectUrl", + "DirectUrlValidationError", + "DirInfo", + "ArchiveInfo", + "VcsInfo", +] + +T = TypeVar("T") + +DIRECT_URL_METADATA_NAME = "direct_url.json" +ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$") + + +class DirectUrlValidationError(Exception): + pass + + +def _get( + d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None +) -> Optional[T]: + """Get value from dictionary and verify expected type.""" + if key not in d: + return default + value = d[key] + if not isinstance(value, expected_type): + raise DirectUrlValidationError( + "{!r} has unexpected type for {} (expected {})".format( + value, key, expected_type + ) + ) + return value + + +def _get_required( + d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None +) -> T: + value = _get(d, expected_type, key, default) + if value is None: + raise DirectUrlValidationError(f"{key} must have a value") + return value + + +def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType": + infos = [info for info in infos if info is not None] + if not infos: + raise DirectUrlValidationError( + "missing one of archive_info, dir_info, vcs_info" + ) + if len(infos) > 1: + raise DirectUrlValidationError( + "more than one of archive_info, dir_info, vcs_info" + ) + assert infos[0] is not None + 
return infos[0] + + +def _filter_none(**kwargs: Any) -> Dict[str, Any]: + """Make dict excluding None values.""" + return {k: v for k, v in kwargs.items() if v is not None} + + +class VcsInfo: + name = "vcs_info" + + def __init__( + self, + vcs: str, + commit_id: str, + requested_revision: Optional[str] = None, + resolved_revision: Optional[str] = None, + resolved_revision_type: Optional[str] = None, + ) -> None: + self.vcs = vcs + self.requested_revision = requested_revision + self.commit_id = commit_id + self.resolved_revision = resolved_revision + self.resolved_revision_type = resolved_revision_type + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]: + if d is None: + return None + return cls( + vcs=_get_required(d, str, "vcs"), + commit_id=_get_required(d, str, "commit_id"), + requested_revision=_get(d, str, "requested_revision"), + resolved_revision=_get(d, str, "resolved_revision"), + resolved_revision_type=_get(d, str, "resolved_revision_type"), + ) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none( + vcs=self.vcs, + requested_revision=self.requested_revision, + commit_id=self.commit_id, + resolved_revision=self.resolved_revision, + resolved_revision_type=self.resolved_revision_type, + ) + + +class ArchiveInfo: + name = "archive_info" + + def __init__( + self, + hash: Optional[str] = None, + ) -> None: + self.hash = hash + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]: + if d is None: + return None + return cls(hash=_get(d, str, "hash")) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none(hash=self.hash) + + +class DirInfo: + name = "dir_info" + + def __init__( + self, + editable: bool = False, + ) -> None: + self.editable = editable + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]: + if d is None: + return None + return cls(editable=_get_required(d, bool, "editable", default=False)) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none(editable=self.editable or None) + + +InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] + + +class DirectUrl: + def __init__( + self, + url: str, + info: InfoType, + subdirectory: Optional[str] = None, + ) -> None: + self.url = url + self.info = info + self.subdirectory = subdirectory + + def _remove_auth_from_netloc(self, netloc: str) -> str: + if "@" not in netloc: + return netloc + user_pass, netloc_no_user_pass = netloc.split("@", 1) + if ( + isinstance(self.info, VcsInfo) + and self.info.vcs == "git" + and user_pass == "git" + ): + return netloc + if ENV_VAR_RE.match(user_pass): + return netloc + return netloc_no_user_pass + + @property + def redacted_url(self) -> str: + """url with user:password part removed unless it is formed with + environment variables as specified in PEP 610, or it is ``git`` + in the case of a git URL. 
+ """ + purl = urllib.parse.urlsplit(self.url) + netloc = self._remove_auth_from_netloc(purl.netloc) + surl = urllib.parse.urlunsplit( + (purl.scheme, netloc, purl.path, purl.query, purl.fragment) + ) + return surl + + def validate(self) -> None: + self.from_dict(self.to_dict()) + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl": + return DirectUrl( + url=_get_required(d, str, "url"), + subdirectory=_get(d, str, "subdirectory"), + info=_exactly_one_of( + [ + ArchiveInfo._from_dict(_get(d, dict, "archive_info")), + DirInfo._from_dict(_get(d, dict, "dir_info")), + VcsInfo._from_dict(_get(d, dict, "vcs_info")), + ] + ), + ) + + def to_dict(self) -> Dict[str, Any]: + res = _filter_none( + url=self.redacted_url, + subdirectory=self.subdirectory, + ) + res[self.info.name] = self.info._to_dict() + return res + + @classmethod + def from_json(cls, s: str) -> "DirectUrl": + return cls.from_dict(json.loads(s)) + + def to_json(self) -> str: + return json.dumps(self.to_dict(), sort_keys=True) + + def is_local_editable(self) -> bool: + return isinstance(self.info, DirInfo) and self.info.editable diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/format_control.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/format_control.py new file mode 100644 index 0000000..db3995e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/format_control.py @@ -0,0 +1,80 @@ +from typing import FrozenSet, Optional, Set + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import CommandError + + +class FormatControl: + """Helper for managing formats from which a package can be installed.""" + + __slots__ = ["no_binary", "only_binary"] + + def __init__( + self, + no_binary: Optional[Set[str]] = None, + only_binary: Optional[Set[str]] = None, + ) -> None: + if no_binary is None: + no_binary = set() + if only_binary is None: + only_binary = set() + + self.no_binary = no_binary + self.only_binary = only_binary + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + + if self.__slots__ != other.__slots__: + return False + + return all(getattr(self, k) == getattr(other, k) for k in self.__slots__) + + def __repr__(self) -> str: + return "{}({}, {})".format( + self.__class__.__name__, self.no_binary, self.only_binary + ) + + @staticmethod + def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None: + if value.startswith("-"): + raise CommandError( + "--no-binary / --only-binary option requires 1 argument." 
+ ) + new = value.split(",") + while ":all:" in new: + other.clear() + target.clear() + target.add(":all:") + del new[: new.index(":all:") + 1] + # Without a none, we want to discard everything as :all: covers it + if ":none:" not in new: + return + for name in new: + if name == ":none:": + target.clear() + continue + name = canonicalize_name(name) + other.discard(name) + target.add(name) + + def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]: + result = {"binary", "source"} + if canonical_name in self.only_binary: + result.discard("source") + elif canonical_name in self.no_binary: + result.discard("binary") + elif ":all:" in self.only_binary: + result.discard("source") + elif ":all:" in self.no_binary: + result.discard("binary") + return frozenset(result) + + def disallow_binaries(self) -> None: + self.handle_mutual_excludes( + ":all:", + self.no_binary, + self.only_binary, + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/index.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/index.py new file mode 100644 index 0000000..b94c325 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/index.py @@ -0,0 +1,28 @@ +import urllib.parse + + +class PackageIndex: + """Represents a Package Index and provides easier access to endpoints""" + + __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"] + + def __init__(self, url: str, file_storage_domain: str) -> None: + super().__init__() + self.url = url + self.netloc = urllib.parse.urlsplit(url).netloc + self.simple_url = self._url_for_path("simple") + self.pypi_url = self._url_for_path("pypi") + + # This is part of a temporary hack used to block installs of PyPI + # packages which depend on external urls only necessary until PyPI can + # block such packages themselves + self.file_storage_domain = file_storage_domain + + def _url_for_path(self, path: str) -> str: + return urllib.parse.urljoin(self.url, path) + + +PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org") +TestPyPI = PackageIndex( + "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org" +) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/link.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/link.py new file mode 100644 index 0000000..6069b27 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/link.py @@ -0,0 +1,288 @@ +import functools +import logging +import os +import posixpath +import re +import urllib.parse +from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union + +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + redact_auth_from_url, + split_auth_from_netloc, + splitext, +) +from pip._internal.utils.models import KeyBasedCompareMixin +from pip._internal.utils.urls import path_to_url, url_to_path + +if TYPE_CHECKING: + from pip._internal.index.collector import HTMLPage + +logger = logging.getLogger(__name__) + + +_SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5") + + +class Link(KeyBasedCompareMixin): + """Represents a parsed link from a Package Index's simple URL""" + + __slots__ = [ + "_parsed_url", + "_url", + "comes_from", + "requires_python", + "yanked_reason", + "cache_link_parsing", + ] + + def __init__( + self, + url: str, + comes_from: Optional[Union[str, "HTMLPage"]] = None, + requires_python: Optional[str] = None, + yanked_reason: 
Optional[str] = None, + cache_link_parsing: bool = True, + ) -> None: + """ + :param url: url of the resource pointed to (href of the link) + :param comes_from: instance of HTMLPage where the link was found, + or string. + :param requires_python: String containing the `Requires-Python` + metadata field, specified in PEP 345. This may be specified by + a data-requires-python attribute in the HTML link tag, as + described in PEP 503. + :param yanked_reason: the reason the file has been yanked, if the + file has been yanked, or None if the file hasn't been yanked. + This is the value of the "data-yanked" attribute, if present, in + a simple repository HTML link. If the file has been yanked but + no reason was provided, this should be the empty string. See + PEP 592 for more information and the specification. + :param cache_link_parsing: A flag that is used elsewhere to determine + whether resources retrieved from this link + should be cached. PyPI index urls should + generally have this set to False, for + example. + """ + + # url can be a UNC windows share + if url.startswith("\\\\"): + url = path_to_url(url) + + self._parsed_url = urllib.parse.urlsplit(url) + # Store the url as a private attribute to prevent accidentally + # trying to set a new value. + self._url = url + + self.comes_from = comes_from + self.requires_python = requires_python if requires_python else None + self.yanked_reason = yanked_reason + + super().__init__(key=url, defining_class=Link) + + self.cache_link_parsing = cache_link_parsing + + def __str__(self) -> str: + if self.requires_python: + rp = f" (requires-python:{self.requires_python})" + else: + rp = "" + if self.comes_from: + return "{} (from {}){}".format( + redact_auth_from_url(self._url), self.comes_from, rp + ) + else: + return redact_auth_from_url(str(self._url)) + + def __repr__(self) -> str: + return f"" + + @property + def url(self) -> str: + return self._url + + @property + def filename(self) -> str: + path = self.path.rstrip("/") + name = posixpath.basename(path) + if not name: + # Make sure we don't leak auth information if the netloc + # includes a username and password. + netloc, user_pass = split_auth_from_netloc(self.netloc) + return netloc + + name = urllib.parse.unquote(name) + assert name, f"URL {self._url!r} produced no filename" + return name + + @property + def file_path(self) -> str: + return url_to_path(self.url) + + @property + def scheme(self) -> str: + return self._parsed_url.scheme + + @property + def netloc(self) -> str: + """ + This can contain auth information. 
+ """ + return self._parsed_url.netloc + + @property + def path(self) -> str: + return urllib.parse.unquote(self._parsed_url.path) + + def splitext(self) -> Tuple[str, str]: + return splitext(posixpath.basename(self.path.rstrip("/"))) + + @property + def ext(self) -> str: + return self.splitext()[1] + + @property + def url_without_fragment(self) -> str: + scheme, netloc, path, query, fragment = self._parsed_url + return urllib.parse.urlunsplit((scheme, netloc, path, query, "")) + + _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") + + @property + def egg_fragment(self) -> Optional[str]: + match = self._egg_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)") + + @property + def subdirectory_fragment(self) -> Optional[str]: + match = self._subdirectory_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + _hash_re = re.compile( + r"({choices})=([a-f0-9]+)".format(choices="|".join(_SUPPORTED_HASHES)) + ) + + @property + def hash(self) -> Optional[str]: + match = self._hash_re.search(self._url) + if match: + return match.group(2) + return None + + @property + def hash_name(self) -> Optional[str]: + match = self._hash_re.search(self._url) + if match: + return match.group(1) + return None + + @property + def show_url(self) -> str: + return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0]) + + @property + def is_file(self) -> bool: + return self.scheme == "file" + + def is_existing_dir(self) -> bool: + return self.is_file and os.path.isdir(self.file_path) + + @property + def is_wheel(self) -> bool: + return self.ext == WHEEL_EXTENSION + + @property + def is_vcs(self) -> bool: + from pip._internal.vcs import vcs + + return self.scheme in vcs.all_schemes + + @property + def is_yanked(self) -> bool: + return self.yanked_reason is not None + + @property + def has_hash(self) -> bool: + return self.hash_name is not None + + def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: + """ + Return True if the link has a hash and it is allowed. + """ + if hashes is None or not self.has_hash: + return False + # Assert non-None so mypy knows self.hash_name and self.hash are str. + assert self.hash_name is not None + assert self.hash is not None + + return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) + + +class _CleanResult(NamedTuple): + """Convert link for equivalency check. + + This is used in the resolver to check whether two URL-specified requirements + likely point to the same distribution and can be considered equivalent. This + equivalency logic avoids comparing URLs literally, which can be too strict + (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users. + + Currently this does three things: + + 1. Drop the basic auth part. This is technically wrong since a server can + serve different content based on auth, but if it does that, it is even + impossible to guarantee two URLs without auth are equivalent, since + the user can input different auth information when prompted. So the + practical solution is to assume the auth doesn't affect the response. + 2. Parse the query to avoid the ordering issue. Note that ordering under the + same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are + still considered different. + 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and + hash values, since it should have no impact the downloaded content. 
Note + that this drops the "egg=" part historically used to denote the requested + project (and extras), which is wrong in the strictest sense, but too many + people are supplying it inconsistently to cause superfluous resolution + conflicts, so we choose to also ignore them. + """ + + parsed: urllib.parse.SplitResult + query: Dict[str, List[str]] + subdirectory: str + hashes: Dict[str, str] + + +def _clean_link(link: Link) -> _CleanResult: + parsed = link._parsed_url + netloc = parsed.netloc.rsplit("@", 1)[-1] + # According to RFC 8089, an empty host in file: means localhost. + if parsed.scheme == "file" and not netloc: + netloc = "localhost" + fragment = urllib.parse.parse_qs(parsed.fragment) + if "egg" in fragment: + logger.debug("Ignoring egg= fragment in %s", link) + try: + # If there are multiple subdirectory values, use the first one. + # This matches the behavior of Link.subdirectory_fragment. + subdirectory = fragment["subdirectory"][0] + except (IndexError, KeyError): + subdirectory = "" + # If there are multiple hash values under the same algorithm, use the + # first one. This matches the behavior of Link.hash_value. + hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment} + return _CleanResult( + parsed=parsed._replace(netloc=netloc, query="", fragment=""), + query=urllib.parse.parse_qs(parsed.query), + subdirectory=subdirectory, + hashes=hashes, + ) + + +@functools.lru_cache(maxsize=None) +def links_equivalent(link1: Link, link2: Link) -> bool: + return _clean_link(link1) == _clean_link(link2) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/scheme.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/scheme.py new file mode 100644 index 0000000..f51190a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/scheme.py @@ -0,0 +1,31 @@ +""" +For types associated with installation schemes. + +For a general overview of available schemes and their context, see +https://docs.python.org/3/install/index.html#alternate-installation. +""" + + +SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"] + + +class Scheme: + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. + """ + + __slots__ = SCHEME_KEYS + + def __init__( + self, + platlib: str, + purelib: str, + headers: str, + scripts: str, + data: str, + ) -> None: + self.platlib = platlib + self.purelib = purelib + self.headers = headers + self.scripts = scripts + self.data = data diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/search_scope.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/search_scope.py new file mode 100644 index 0000000..e4e54c2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/search_scope.py @@ -0,0 +1,129 @@ +import itertools +import logging +import os +import posixpath +import urllib.parse +from typing import List + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.models.index import PyPI +from pip._internal.utils.compat import has_tls +from pip._internal.utils.misc import normalize_path, redact_auth_from_url + +logger = logging.getLogger(__name__) + + +class SearchScope: + + """ + Encapsulates the locations that pip is configured to search. + """ + + __slots__ = ["find_links", "index_urls"] + + @classmethod + def create( + cls, + find_links: List[str], + index_urls: List[str], + ) -> "SearchScope": + """ + Create a SearchScope object after normalizing the `find_links`. 
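+
+        Links that start with "~" are expanded to absolute paths when the
+        expanded path exists, and a warning is logged if any configured
+        location requires TLS/SSL while Python's ssl module is unavailable.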
+ """ + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. + # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + built_find_links: List[str] = [] + for link in find_links: + if link.startswith("~"): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + built_find_links.append(link) + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not has_tls(): + for link in itertools.chain(index_urls, built_find_links): + parsed = urllib.parse.urlparse(link) + if parsed.scheme == "https": + logger.warning( + "pip is configured with locations that require " + "TLS/SSL, however the ssl module in Python is not " + "available." + ) + break + + return cls( + find_links=built_find_links, + index_urls=index_urls, + ) + + def __init__( + self, + find_links: List[str], + index_urls: List[str], + ) -> None: + self.find_links = find_links + self.index_urls = index_urls + + def get_formatted_locations(self) -> str: + lines = [] + redacted_index_urls = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + for url in self.index_urls: + + redacted_index_url = redact_auth_from_url(url) + + # Parse the URL + purl = urllib.parse.urlsplit(redacted_index_url) + + # URL is generally invalid if scheme and netloc is missing + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not purl.scheme and not purl.netloc: + logger.warning( + 'The index url "%s" seems invalid, please provide a scheme.', + redacted_index_url, + ) + + redacted_index_urls.append(redacted_index_url) + + lines.append( + "Looking in indexes: {}".format(", ".join(redacted_index_urls)) + ) + + if self.find_links: + lines.append( + "Looking in links: {}".format( + ", ".join(redact_auth_from_url(url) for url in self.find_links) + ) + ) + return "\n".join(lines) + + def get_index_urls_locations(self, project_name: str) -> List[str]: + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + use this url_name to produce all locations + """ + + def mkurl_pypi_url(url: str) -> str: + loc = posixpath.join( + url, urllib.parse.quote(canonicalize_name(project_name)) + ) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. + if not loc.endswith("/"): + loc = loc + "/" + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/selection_prefs.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/selection_prefs.py new file mode 100644 index 0000000..977bc4c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/selection_prefs.py @@ -0,0 +1,51 @@ +from typing import Optional + +from pip._internal.models.format_control import FormatControl + + +class SelectionPreferences: + """ + Encapsulates the candidate selection preferences for downloading + and installing files. 
+ """ + + __slots__ = [ + "allow_yanked", + "allow_all_prereleases", + "format_control", + "prefer_binary", + "ignore_requires_python", + ] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + allow_yanked: bool, + allow_all_prereleases: bool = False, + format_control: Optional[FormatControl] = None, + prefer_binary: bool = False, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """Create a SelectionPreferences object. + + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param format_control: A FormatControl object or None. Used to control + the selection of source packages / binary packages when consulting + the index and links. + :param prefer_binary: Whether to prefer an old, but valid, binary + dist over a new source dist. + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self.allow_yanked = allow_yanked + self.allow_all_prereleases = allow_all_prereleases + self.format_control = format_control + self.prefer_binary = prefer_binary + self.ignore_requires_python = ignore_requires_python diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/target_python.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/target_python.py new file mode 100644 index 0000000..744bd7e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/target_python.py @@ -0,0 +1,110 @@ +import sys +from typing import List, Optional, Tuple + +from pip._vendor.packaging.tags import Tag + +from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot +from pip._internal.utils.misc import normalize_version_info + + +class TargetPython: + + """ + Encapsulates the properties of a Python interpreter one is targeting + for a package install, download, etc. + """ + + __slots__ = [ + "_given_py_version_info", + "abis", + "implementation", + "platforms", + "py_version", + "py_version_info", + "_valid_tags", + ] + + def __init__( + self, + platforms: Optional[List[str]] = None, + py_version_info: Optional[Tuple[int, ...]] = None, + abis: Optional[List[str]] = None, + implementation: Optional[str] = None, + ) -> None: + """ + :param platforms: A list of strings or None. If None, searches for + packages that are supported by the current system. Otherwise, will + find packages that can be built on the platforms passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param py_version_info: An optional tuple of ints representing the + Python version information to use (e.g. `sys.version_info[:3]`). + This can have length 1, 2, or 3 when provided. + :param abis: A list of strings or None. This is passed to + compatibility_tags.py's get_supported() function as is. + :param implementation: A string or None. This is passed to + compatibility_tags.py's get_supported() function as is. + """ + # Store the given py_version_info for when we call get_supported(). 
+ self._given_py_version_info = py_version_info + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + py_version = ".".join(map(str, py_version_info[:2])) + + self.abis = abis + self.implementation = implementation + self.platforms = platforms + self.py_version = py_version + self.py_version_info = py_version_info + + # This is used to cache the return value of get_tags(). + self._valid_tags: Optional[List[Tag]] = None + + def format_given(self) -> str: + """ + Format the given, non-None attributes for display. + """ + display_version = None + if self._given_py_version_info is not None: + display_version = ".".join( + str(part) for part in self._given_py_version_info + ) + + key_values = [ + ("platforms", self.platforms), + ("version_info", display_version), + ("abis", self.abis), + ("implementation", self.implementation), + ] + return " ".join( + f"{key}={value!r}" for key, value in key_values if value is not None + ) + + def get_tags(self) -> List[Tag]: + """ + Return the supported PEP 425 tags to check wheel candidates against. + + The tags are returned in order of preference (most preferred first). + """ + if self._valid_tags is None: + # Pass versions=None if no py_version_info was given since + # versions=None uses special default logic. + py_version_info = self._given_py_version_info + if py_version_info is None: + version = None + else: + version = version_info_to_nodot(py_version_info) + + tags = get_supported( + version=version, + platforms=self.platforms, + abis=self.abis, + impl=self.implementation, + ) + self._valid_tags = tags + + return self._valid_tags diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/models/wheel.py b/.venv/lib/python3.9/site-packages/pip/_internal/models/wheel.py new file mode 100644 index 0000000..e091612 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/models/wheel.py @@ -0,0 +1,89 @@ +"""Represents a wheel file and provides access to the various parts of the +name that have meaning. +""" +import re +from typing import Dict, Iterable, List + +from pip._vendor.packaging.tags import Tag + +from pip._internal.exceptions import InvalidWheelFilename + + +class Wheel: + """A wheel file""" + + wheel_file_re = re.compile( + r"""^(?P(?P.+?)-(?P.*?)) + ((-(?P\d[^-]*?))?-(?P.+?)-(?P.+?)-(?P.+?) 
+ \.whl|\.dist-info)$""", + re.VERBOSE, + ) + + def __init__(self, filename: str) -> None: + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.") + self.filename = filename + self.name = wheel_info.group("name").replace("_", "-") + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group("ver").replace("_", "-") + self.build_tag = wheel_info.group("build") + self.pyversions = wheel_info.group("pyver").split(".") + self.abis = wheel_info.group("abi").split(".") + self.plats = wheel_info.group("plat").split(".") + + # All the tag combinations from this file + self.file_tags = { + Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats + } + + def get_formatted_file_tags(self) -> List[str]: + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags: List[Tag]) -> int: + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min(tags.index(tag) for tag in self.file_tags if tag in tags) + + def find_most_preferred_tag( + self, tags: List[Tag], tag_to_priority: Dict[Tag, int] + ) -> int: + """Return the priority of the most preferred tag that one of the wheel's file + tag combinations achieves in the given list of supported tags using the given + tag_to_priority mapping, where lower priorities are more-preferred. + + This is used in place of support_index_min in some cases in order to avoid + an expensive linear scan of a large list of tags. + + :param tags: the PEP 425 tags to check the wheel against. + :param tag_to_priority: a mapping from tag to priority of that tag, where + lower is more preferred. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min( + tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority + ) + + def supported(self, tags: Iterable[Tag]) -> bool: + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. + """ + return not self.file_tags.isdisjoint(tags) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/network/__init__.py new file mode 100644 index 0000000..b51bde9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/network/__init__.py @@ -0,0 +1,2 @@ +"""Contains purely network-related utilities. 
+""" diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..0fd2a25 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc new file mode 100644 index 0000000..79fff3f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc new file mode 100644 index 0000000..57178ac Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc new file mode 100644 index 0000000..f191d13 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc new file mode 100644 index 0000000..39c38d9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc new file mode 100644 index 0000000..ba0d646 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc new file mode 100644 index 0000000..f20b3da Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc new file mode 100644 index 0000000..64fb8e9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/auth.py b/.venv/lib/python3.9/site-packages/pip/_internal/network/auth.py new file mode 100644 index 0000000..ca42798 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/network/auth.py @@ -0,0 +1,323 @@ +"""Network Authentication Helpers + +Contains interface (MultiDomainBasicAuth) and associated glue code for +providing credentials in the context of network requests. 
+""" + +import urllib.parse +from typing import Any, Dict, List, Optional, Tuple + +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.models import Request, Response +from pip._vendor.requests.utils import get_netrc_auth + +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + ask, + ask_input, + ask_password, + remove_auth_from_url, + split_auth_netloc_from_url, +) +from pip._internal.vcs.versioncontrol import AuthInfo + +logger = getLogger(__name__) + +Credentials = Tuple[str, str, str] + +try: + import keyring +except ImportError: + keyring = None # type: ignore[assignment] +except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + keyring = None # type: ignore[assignment] + + +def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]: + """Return the tuple auth for a given url from keyring.""" + global keyring + if not url or not keyring: + return None + + try: + try: + get_credential = keyring.get_credential + except AttributeError: + pass + else: + logger.debug("Getting credentials from keyring for %s", url) + cred = get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username: + logger.debug("Getting password from keyring for %s", url) + password = keyring.get_password(url, username) + if password: + return username, password + + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + keyring = None # type: ignore[assignment] + return None + + +class MultiDomainBasicAuth(AuthBase): + def __init__( + self, prompting: bool = True, index_urls: Optional[List[str]] = None + ) -> None: + self.prompting = prompting + self.index_urls = index_urls + self.passwords: Dict[str, AuthInfo] = {} + # When the user is prompted to enter credentials and keyring is + # available, we will offer to save them. If the user accepts, + # this value is set to the credentials they entered. After the + # request authenticates, the caller should call + # ``save_credentials`` to save these. + self._credentials_to_save: Optional[Credentials] = None + + def _get_index_url(self, url: str) -> Optional[str]: + """Return the original index URL matching the requested URL. + + Cached or dynamically generated credentials may work against + the original index URL rather than just the netloc. + + The provided url should have had its username and password + removed already. If the original index url had credentials then + they will be included in the return value. + + Returns None if no matching index was found, or if --no-index + was specified by the user. + """ + if not url or not self.index_urls: + return None + + for u in self.index_urls: + prefix = remove_auth_from_url(u).rstrip("/") + "/" + if url.startswith(prefix): + return u + return None + + def _get_new_credentials( + self, + original_url: str, + allow_netrc: bool = True, + allow_keyring: bool = False, + ) -> AuthInfo: + """Find and return credentials for the specified URL.""" + # Split the credentials and netloc from the url. 
+ url, netloc, url_user_password = split_auth_netloc_from_url( + original_url, + ) + + # Start with the credentials embedded in the url + username, password = url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in url for %s", netloc) + return url_user_password + + # Find a matching index url for this request + index_url = self._get_index_url(url) + if index_url: + # Split the credentials from the url. + index_info = split_auth_netloc_from_url(index_url) + if index_info: + index_url, _, index_url_user_password = index_info + logger.debug("Found index url %s", index_url) + + # If an index URL was found, try its embedded credentials + if index_url and index_url_user_password[0] is not None: + username, password = index_url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in index url for %s", netloc) + return index_url_user_password + + # Get creds from netrc if we still don't have them + if allow_netrc: + netrc_auth = get_netrc_auth(original_url) + if netrc_auth: + logger.debug("Found credentials in netrc for %s", netloc) + return netrc_auth + + # If we don't have a password and keyring is available, use it. + if allow_keyring: + # The index url is more specific than the netloc, so try it first + # fmt: off + kr_auth = ( + get_keyring_auth(index_url, username) or + get_keyring_auth(netloc, username) + ) + # fmt: on + if kr_auth: + logger.debug("Found credentials in keyring for %s", netloc) + return kr_auth + + return username, password + + def _get_url_and_credentials( + self, original_url: str + ) -> Tuple[str, Optional[str], Optional[str]]: + """Return the credentials to use for the provided URL. + + If allowed, netrc and keyring may be used to obtain the + correct credentials. + + Returns (url_without_credentials, username, password). Note + that even if the original URL contains credentials, this + function may return a different username and password. + """ + url, netloc, _ = split_auth_netloc_from_url(original_url) + + # Try to get credentials from original url + username, password = self._get_new_credentials(original_url) + + # If credentials not found, use any stored credentials for this netloc. + # Do this if either the username or the password is missing. + # This accounts for the situation in which the user has specified + # the username in the index url, but the password comes from keyring. + if (username is None or password is None) and netloc in self.passwords: + un, pw = self.passwords[netloc] + # It is possible that the cached credentials are for a different username, + # in which case the cache should be ignored. + if username is None or username == un: + username, password = un, pw + + if username is not None or password is not None: + # Convert the username and password if they're None, so that + # this netloc will show up as "cached" in the conditional above. + # Further, HTTPBasicAuth doesn't accept None, so it makes sense to + # cache the value that is going to be used. + username = username or "" + password = password or "" + + # Store any acquired credentials. 
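+            # Keyed by netloc, so the "netloc in self.passwords" check above
+            # can reuse this pair for later requests to the same host.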
+ self.passwords[netloc] = (username, password) + + assert ( + # Credentials were found + (username is not None and password is not None) + # Credentials were not found + or (username is None and password is None) + ), f"Could not load credentials from url: {original_url}" + + return url, username, password + + def __call__(self, req: Request) -> Request: + # Get credentials for this request + url, username, password = self._get_url_and_credentials(req.url) + + # Set the url of the request to the url without any credentials + req.url = url + + if username is not None and password is not None: + # Send the basic auth with this request + req = HTTPBasicAuth(username, password)(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + # Factored out to allow for easy patching in tests + def _prompt_for_password( + self, netloc: str + ) -> Tuple[Optional[str], Optional[str], bool]: + username = ask_input(f"User for {netloc}: ") + if not username: + return None, None, False + auth = get_keyring_auth(netloc, username) + if auth and auth[0] is not None and auth[1] is not None: + return auth[0], auth[1], False + password = ask_password("Password: ") + return username, password, True + + # Factored out to allow for easy patching in tests + def _should_save_password_to_keyring(self) -> bool: + if not keyring: + return False + return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + + def handle_401(self, resp: Response, **kwargs: Any) -> Response: + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simply return the response + if not self.prompting: + return resp + + parsed = urllib.parse.urlparse(resp.url) + + # Query the keyring for credentials: + username, password = self._get_new_credentials( + resp.url, + allow_netrc=False, + allow_keyring=True, + ) + + # Prompt the user for a new username and password + save = False + if not username and not password: + username, password, save = self._prompt_for_password(parsed.netloc) + + # Store the new username and password to use for future requests + self._credentials_to_save = None + if username is not None and password is not None: + self.passwords[parsed.netloc] = (username, password) + + # Prompt to save the password to keyring + if save and self._should_save_password_to_keyring(): + self._credentials_to_save = (parsed.netloc, username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + req.register_hook("response", self.warn_on_401) + + # On successful request, save the credentials that were used to + # keyring. (Note that if the user responded "no" above, this member + # is not set and nothing will be saved.) 
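+        # save_credentials (registered below) only persists the pair when the
+        # retried request comes back with a status code under 400, so a wrong
+        # password is never written to keyring.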
+ if self._credentials_to_save: + req.register_hook("response", self.save_credentials) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def warn_on_401(self, resp: Response, **kwargs: Any) -> None: + """Response callback to warn about incorrect credentials.""" + if resp.status_code == 401: + logger.warning( + "401 Error, Credentials not correct for %s", + resp.request.url, + ) + + def save_credentials(self, resp: Response, **kwargs: Any) -> None: + """Response callback to save credentials on success.""" + assert keyring is not None, "should never reach here without keyring" + if not keyring: + return + + creds = self._credentials_to_save + self._credentials_to_save = None + if creds and resp.status_code < 400: + try: + logger.info("Saving credentials to keyring") + keyring.set_password(*creds) + except Exception: + logger.exception("Failed to save credentials") diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/cache.py b/.venv/lib/python3.9/site-packages/pip/_internal/network/cache.py new file mode 100644 index 0000000..2d915e6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/network/cache.py @@ -0,0 +1,69 @@ +"""HTTP cache implementation. +""" + +import os +from contextlib import contextmanager +from typing import Iterator, Optional + +from pip._vendor.cachecontrol.cache import BaseCache +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.requests.models import Response + +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ensure_dir + + +def is_from_cache(response: Response) -> bool: + return getattr(response, "from_cache", False) + + +@contextmanager +def suppressed_cache_errors() -> Iterator[None]: + """If we can't access the cache then we can just skip caching and process + requests as if caching wasn't enabled. + """ + try: + yield + except OSError: + pass + + +class SafeFileCache(BaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, directory: str) -> None: + assert directory is not None, "Cache directory must not be None." + super().__init__() + self.directory = directory + + def _get_cache_path(self, name: str) -> str: + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. + hashed = FileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key: str) -> Optional[bytes]: + path = self._get_cache_path(key) + with suppressed_cache_errors(): + with open(path, "rb") as f: + return f.read() + + def set(self, key: str, value: bytes) -> None: + path = self._get_cache_path(key) + with suppressed_cache_errors(): + ensure_dir(os.path.dirname(path)) + + with adjacent_tmp_file(path) as f: + f.write(value) + + replace(f.name, path) + + def delete(self, key: str) -> None: + path = self._get_cache_path(key) + with suppressed_cache_errors(): + os.remove(path) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/download.py b/.venv/lib/python3.9/site-packages/pip/_internal/network/download.py new file mode 100644 index 0000000..47af547 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/network/download.py @@ -0,0 +1,184 @@ +"""Download files with progress indicators. 
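+
+Provides Downloader and BatchDownloader, which stream each response to disk in
+chunks and derive the on-disk filename from the Content-Disposition header when
+one is present.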
+""" +import cgi +import logging +import mimetypes +import os +from typing import Iterable, Optional, Tuple + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.cli.progress_bars import DownloadProgressProvider +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.index import PyPI +from pip._internal.models.link import Link +from pip._internal.network.cache import is_from_cache +from pip._internal.network.session import PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks +from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp: Response) -> Optional[int]: + try: + return int(resp.headers["content-length"]) + except (ValueError, KeyError, TypeError): + return None + + +def _prepare_download( + resp: Response, + link: Link, + progress_bar: str, +) -> Iterable[bytes]: + total_length = _get_http_response_size(resp) + + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + logged_url = "{} ({})".format(logged_url, format_size(total_length)) + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (40 * 1000): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) + + if not show_progress: + return chunks + + return DownloadProgressProvider(progress_bar, max=total_length)(chunks) + + +def sanitize_content_filename(filename: str) -> str: + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition: str, default_filename: str) -> str: + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. + """ + _type, params = cgi.parse_header(content_disposition) + filename = params.get("filename") + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. + filename = sanitize_content_filename(filename) + return filename or default_filename + + +def _get_http_response_filename(resp: Response, link: Link) -> str: + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. 
+ """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get("content-disposition") + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext: Optional[str] = splitext(filename)[1] + if not ext: + ext = mimetypes.guess_extension(resp.headers.get("content-type", "")) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +def _http_get_download(session: PipSession, link: Link) -> Response: + target_url = link.url.split("#", 1)[0] + resp = session.get(target_url, headers=HEADERS, stream=True) + raise_for_status(resp) + return resp + + +class Downloader: + def __init__( + self, + session: PipSession, + progress_bar: str, + ) -> None: + self._session = session + self._progress_bar = progress_bar + + def __call__(self, link: Link, location: str) -> Tuple[str, str]: + """Download the file given by link into location.""" + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + + filename = _get_http_response_filename(resp, link) + filepath = os.path.join(location, filename) + + chunks = _prepare_download(resp, link, self._progress_bar) + with open(filepath, "wb") as content_file: + for chunk in chunks: + content_file.write(chunk) + content_type = resp.headers.get("Content-Type", "") + return filepath, content_type + + +class BatchDownloader: + def __init__( + self, + session: PipSession, + progress_bar: str, + ) -> None: + self._session = session + self._progress_bar = progress_bar + + def __call__( + self, links: Iterable[Link], location: str + ) -> Iterable[Tuple[Link, Tuple[str, str]]]: + """Download the files given by links into location.""" + for link in links: + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", + e.response.status_code, + link, + ) + raise + + filename = _get_http_response_filename(resp, link) + filepath = os.path.join(location, filename) + + chunks = _prepare_download(resp, link, self._progress_bar) + with open(filepath, "wb") as content_file: + for chunk in chunks: + content_file.write(chunk) + content_type = resp.headers.get("Content-Type", "") + yield link, (filepath, content_type) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/lazy_wheel.py b/.venv/lib/python3.9/site-packages/pip/_internal/network/lazy_wheel.py new file mode 100644 index 0000000..c9e44d5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/network/lazy_wheel.py @@ -0,0 +1,210 @@ +"""Lazy ZIP over HTTP""" + +__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"] + +from bisect import bisect_left, bisect_right +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, Dict, Iterator, List, Optional, Tuple +from zipfile import BadZipfile, ZipFile + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution +from pip._internal.network.session import PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, 
response_chunks + + +class HTTPRangeRequestUnsupported(Exception): + pass + + +def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution: + """Return a distribution object from the given wheel URL. + + This uses HTTP range requests to only fetch the potion of the wheel + containing metadata, just enough for the object to be constructed. + If such requests are not supported, HTTPRangeRequestUnsupported + is raised. + """ + with LazyZipOverHTTP(url, session) as zf: + # For read-only ZIP files, ZipFile only needs methods read, + # seek, seekable and tell, not the whole IO protocol. + wheel = MemoryWheel(zf.name, zf) # type: ignore + # After context manager exit, wheel.name + # is an invalid file by intention. + return get_wheel_distribution(wheel, canonicalize_name(name)) + + +class LazyZipOverHTTP: + """File-like object mapped to a ZIP file over HTTP. + + This uses HTTP range requests to lazily fetch the file's content, + which is supposed to be fed to ZipFile. If such requests are not + supported by the server, raise HTTPRangeRequestUnsupported + during initialization. + """ + + def __init__( + self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE + ) -> None: + head = session.head(url, headers=HEADERS) + raise_for_status(head) + assert head.status_code == 200 + self._session, self._url, self._chunk_size = session, url, chunk_size + self._length = int(head.headers["Content-Length"]) + self._file = NamedTemporaryFile() + self.truncate(self._length) + self._left: List[int] = [] + self._right: List[int] = [] + if "bytes" not in head.headers.get("Accept-Ranges", "none"): + raise HTTPRangeRequestUnsupported("range request is not supported") + self._check_zip() + + @property + def mode(self) -> str: + """Opening mode, which is always rb.""" + return "rb" + + @property + def name(self) -> str: + """Path to the underlying file.""" + return self._file.name + + def seekable(self) -> bool: + """Return whether random access is supported, which is True.""" + return True + + def close(self) -> None: + """Close the file.""" + self._file.close() + + @property + def closed(self) -> bool: + """Whether the file is closed.""" + return self._file.closed + + def read(self, size: int = -1) -> bytes: + """Read up to size bytes from the object and return them. + + As a convenience, if size is unspecified or -1, + all bytes until EOF are returned. Fewer than + size bytes may be returned if EOF is reached. + """ + download_size = max(size, self._chunk_size) + start, length = self.tell(), self._length + stop = length if size < 0 else min(start + download_size, length) + start = max(0, stop - download_size) + self._download(start, stop - 1) + return self._file.read(size) + + def readable(self) -> bool: + """Return whether the file is readable, which is True.""" + return True + + def seek(self, offset: int, whence: int = 0) -> int: + """Change stream position and return the new absolute position. + + Seek to offset relative position indicated by whence: + * 0: Start of stream (the default). pos should be >= 0; + * 1: Current position - pos may be negative; + * 2: End of stream - pos usually negative. + """ + return self._file.seek(offset, whence) + + def tell(self) -> int: + """Return the current position.""" + return self._file.tell() + + def truncate(self, size: Optional[int] = None) -> int: + """Resize the stream to the given size in bytes. + + If size is unspecified resize to the current position. + The current stream position isn't changed. 
+ + Return the new file size. + """ + return self._file.truncate(size) + + def writable(self) -> bool: + """Return False.""" + return False + + def __enter__(self) -> "LazyZipOverHTTP": + self._file.__enter__() + return self + + def __exit__(self, *exc: Any) -> Optional[bool]: + return self._file.__exit__(*exc) + + @contextmanager + def _stay(self) -> Iterator[None]: + """Return a context manager keeping the position. + + At the end of the block, seek back to original position. + """ + pos = self.tell() + try: + yield + finally: + self.seek(pos) + + def _check_zip(self) -> None: + """Check and download until the file is a valid ZIP.""" + end = self._length - 1 + for start in reversed(range(0, end, self._chunk_size)): + self._download(start, end) + with self._stay(): + try: + # For read-only ZIP files, ZipFile only needs + # methods read, seek, seekable and tell. + ZipFile(self) # type: ignore + except BadZipfile: + pass + else: + break + + def _stream_response( + self, start: int, end: int, base_headers: Dict[str, str] = HEADERS + ) -> Response: + """Return HTTP response to a range request from start to end.""" + headers = base_headers.copy() + headers["Range"] = f"bytes={start}-{end}" + # TODO: Get range requests to be correctly cached + headers["Cache-Control"] = "no-cache" + return self._session.get(self._url, headers=headers, stream=True) + + def _merge( + self, start: int, end: int, left: int, right: int + ) -> Iterator[Tuple[int, int]]: + """Return an iterator of intervals to be fetched. + + Args: + start (int): Start of needed interval + end (int): End of needed interval + left (int): Index of first overlapping downloaded data + right (int): Index after last overlapping downloaded data + """ + lslice, rslice = self._left[left:right], self._right[left:right] + i = start = min([start] + lslice[:1]) + end = max([end] + rslice[-1:]) + for j, k in zip(lslice, rslice): + if j > i: + yield i, j - 1 + i = k + 1 + if i <= end: + yield i, end + self._left[left:right], self._right[left:right] = [start], [end] + + def _download(self, start: int, end: int) -> None: + """Download bytes from start to end inclusively.""" + with self._stay(): + left = bisect_left(self._right, start) + right = bisect_right(self._left, end) + for start, end in self._merge(start, end, left, right): + response = self._stream_response(start, end) + response.raise_for_status() + self.seek(start) + for chunk in response_chunks(response, self._chunk_size): + self._file.write(chunk) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/session.py b/.venv/lib/python3.9/site-packages/pip/_internal/network/session.py new file mode 100644 index 0000000..cbe743b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/network/session.py @@ -0,0 +1,454 @@ +"""PipSession and supporting code, containing all pip-specific +network request configuration and behavior. 
+""" + +import email.utils +import io +import ipaddress +import json +import logging +import mimetypes +import os +import platform +import shutil +import subprocess +import sys +import urllib.parse +import warnings +from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union + +from pip._vendor import requests, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.models import PreparedRequest, Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3.connectionpool import ConnectionPool +from pip._vendor.urllib3.exceptions import InsecureRequestWarning + +from pip import __version__ +from pip._internal.metadata import get_default_environment +from pip._internal.models.link import Link +from pip._internal.network.auth import MultiDomainBasicAuth +from pip._internal.network.cache import SafeFileCache + +# Import ssl from compat so the initial import occurs in only one place. +from pip._internal.utils.compat import has_tls +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.misc import build_url_from_netloc, parse_netloc +from pip._internal.utils.urls import url_to_path + +logger = logging.getLogger(__name__) + +SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] + + +# Ignore warning raised when using --trusted-host. +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +SECURE_ORIGINS: List[SecureOrigin] = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. + ("ssh", "*", "*"), +] + + +# These are environment variables present when running under various +# CI systems. For each variable, some CI systems that use the variable +# are indicated. The collection was chosen so that for each of a number +# of popular systems, at least one of the environment variables is used. +# This list is used to provide some indication of and lower bound for +# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. +# For more background, see: https://github.com/pypa/pip/issues/5499 +CI_ENVIRONMENT_VARIABLES = ( + # Azure Pipelines + "BUILD_BUILDID", + # Jenkins + "BUILD_ID", + # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI + "CI", + # Explicit environment variable. + "PIP_IS_CI", +) + + +def looks_like_ci() -> bool: + """ + Return whether it looks like pip is running under CI. + """ + # We don't use the method of checking for a tty (e.g. using isatty()) + # because some CI systems mimic a tty (e.g. Travis CI). Thus that + # method doesn't provide definitive information in either direction. + return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) + + +def user_agent() -> str: + """ + Return a string representing the user agent. 
+ """ + data: Dict[str, Any] = { + "installer": {"name": "pip", "version": __version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == "CPython": + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == "PyPy": + pypy_version_info = sys.pypy_version_info # type: ignore + if pypy_version_info.releaselevel == "final": + pypy_version_info = pypy_version_info[:3] + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == "Jython": + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == "IronPython": + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + + linux_distribution = distro.name(), distro.version(), distro.codename() + distro_infos: Dict[str, Any] = dict( + filter( + lambda x: x[1], + zip(["name", "version", "id"], linux_distribution), + ) + ) + libc = dict( + filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + ) + ) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if has_tls(): + import _ssl as ssl + + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_dist = get_default_environment().get_distribution("setuptools") + if setuptools_dist is not None: + data["setuptools_version"] = str(setuptools_dist.version) + + if shutil.which("rustc") is not None: + # If for any reason `rustc --version` fails, silently ignore it + try: + rustc_output = subprocess.check_output( + ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5 + ) + except Exception: + pass + else: + if rustc_output.startswith(b"rustc "): + # The format of `rustc --version` is: + # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'` + # We extract just the middle (1.52.1) part + data["rustc_version"] = rustc_output.split(b" ")[1].decode() + + # Use None rather than False so as not to give the impression that + # pip knows it is not being run under CI. Rather, it is a null or + # inconclusive result. Also, we include some value rather than no + # value to make it easier to know that the check has been run. 
+ data["ci"] = True if looks_like_ci() else None + + user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") + if user_data is not None: + data["user_data"] = user_data + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class LocalFSAdapter(BaseAdapter): + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: Optional[Union[float, Tuple[float, float]]] = None, + verify: Union[bool, str] = True, + cert: Optional[Union[str, Tuple[str, str]]] = None, + proxies: Optional[Mapping[str, str]] = None, + ) -> Response: + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + # format the exception raised as a io.BytesIO object, + # to return a better error message: + resp.status_code = 404 + resp.reason = type(exc).__name__ + resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8")) + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict( + { + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + } + ) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self) -> None: + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + def cert_verify( + self, + conn: ConnectionPool, + url: str, + verify: Union[bool, str], + cert: Optional[Union[str, Tuple[str, str]]], + ) -> None: + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class InsecureCacheControlAdapter(CacheControlAdapter): + def cert_verify( + self, + conn: ConnectionPool, + url: str, + verify: Union[bool, str], + cert: Optional[Union[str, Tuple[str, str]]], + ) -> None: + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class PipSession(requests.Session): + + timeout: Optional[int] = None + + def __init__( + self, + *args: Any, + retries: int = 0, + cache: Optional[str] = None, + trusted_hosts: Sequence[str] = (), + index_urls: Optional[List[str]] = None, + **kwargs: Any, + ) -> None: + """ + :param trusted_hosts: Domains not to emit warnings for when not using + HTTPS. + """ + super().__init__(*args, **kwargs) + + # Namespace the attribute with "pip_" just in case to prevent + # possible conflicts with the base class. + self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = [] + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth(index_urls=index_urls) + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 503, 520, 527], + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. 
+ backoff_factor=0.25, + ) # type: ignore + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching so we'll use it for all http:// URLs. + # If caching is disabled, we will also use it for + # https:// hosts that we've marked as ignoring + # TLS errors for (trusted-hosts). + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + # We want to _only_ cache responses on securely fetched origins or when + # the host is specified as trusted. We do this because + # we can't validate the response of an insecurely/untrusted fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. + if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + self._trusted_host_adapter = InsecureCacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries) + self._trusted_host_adapter = insecure_adapter + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + for host in trusted_hosts: + self.add_trusted_host(host, suppress_logging=True) + + def update_index_urls(self, new_index_urls: List[str]) -> None: + """ + :param new_index_urls: New index urls to update the authentication + handler with. + """ + self.auth.index_urls = new_index_urls + + def add_trusted_host( + self, host: str, source: Optional[str] = None, suppress_logging: bool = False + ) -> None: + """ + :param host: It is okay to provide a host that has previously been + added. + :param source: An optional source string, for logging where the host + string came from. + """ + if not suppress_logging: + msg = f"adding trusted host: {host!r}" + if source is not None: + msg += f" (from {source})" + logger.info(msg) + + host_port = parse_netloc(host) + if host_port not in self.pip_trusted_origins: + self.pip_trusted_origins.append(host_port) + + self.mount( + build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter + ) + self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter) + if not host_port[1]: + self.mount( + build_url_from_netloc(host, scheme="http") + ":", + self._trusted_host_adapter, + ) + # Mount wildcard ports for the same host. + self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter) + + def iter_secure_origins(self) -> Iterator[SecureOrigin]: + yield from SECURE_ORIGINS + for host, port in self.pip_trusted_origins: + yield ("*", host, "*" if port is None else port) + + def is_secure_origin(self, location: Link) -> bool: + # Determine if this url used a secure transport mechanism + parsed = urllib.parse.urlparse(str(location)) + origin_protocol, origin_host, origin_port = ( + parsed.scheme, + parsed.hostname, + parsed.port, + ) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + origin_protocol = origin_protocol.rsplit("+", 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. 
+ for secure_origin in self.iter_secure_origins(): + secure_protocol, secure_host, secure_port = secure_origin + if origin_protocol != secure_protocol and secure_protocol != "*": + continue + + try: + addr = ipaddress.ip_address(origin_host) + network = ipaddress.ip_network(secure_host) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if ( + origin_host + and origin_host.lower() != secure_host.lower() + and secure_host != "*" + ): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. + if addr not in network: + continue + + # Check to see if the port matches. + if ( + origin_port != secure_port + and secure_port != "*" + and secure_port is not None + ): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + origin_host, + origin_host, + ) + + return False + + def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response: + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super().request(method, url, *args, **kwargs) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/utils.py b/.venv/lib/python3.9/site-packages/pip/_internal/network/utils.py new file mode 100644 index 0000000..094cf1b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/network/utils.py @@ -0,0 +1,96 @@ +from typing import Dict, Iterator + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.exceptions import NetworkConnectionError + +# The following comments and HTTP headers were originally added by +# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. +# +# We use Accept-Encoding: identity here because requests defaults to +# accepting compressed responses. This breaks in a variety of ways +# depending on how the server is configured. +# - Some servers will notice that the file isn't a compressible file +# and will leave the file alone and with an empty Content-Encoding +# - Some servers will notice that the file is already compressed and +# will leave the file alone, adding a Content-Encoding: gzip header +# - Some servers won't notice anything at all and will take a file +# that's already been compressed and compress it again, and set +# the Content-Encoding: gzip header +# By setting this to request only the identity encoding we're hoping +# to eliminate the third case. Hopefully there does not exist a server +# which when given a file will notice it is already compressed and that +# you're not asking for a compressed file and will then decompress it +# before sending because if that's the case I don't think it'll ever be +# possible to make this work. 
+HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"} + + +def raise_for_status(resp: Response) -> None: + http_error_msg = "" + if isinstance(resp.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. + try: + reason = resp.reason.decode("utf-8") + except UnicodeDecodeError: + reason = resp.reason.decode("iso-8859-1") + else: + reason = resp.reason + + if 400 <= resp.status_code < 500: + http_error_msg = ( + f"{resp.status_code} Client Error: {reason} for url: {resp.url}" + ) + + elif 500 <= resp.status_code < 600: + http_error_msg = ( + f"{resp.status_code} Server Error: {reason} for url: {resp.url}" + ) + + if http_error_msg: + raise NetworkConnectionError(http_error_msg, response=resp) + + +def response_chunks( + response: Response, chunk_size: int = CONTENT_CHUNK_SIZE +) -> Iterator[bytes]: + """Given a requests Response, provide the data chunks.""" + try: + # Special case for urllib3. + for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. + while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/network/xmlrpc.py b/.venv/lib/python3.9/site-packages/pip/_internal/network/xmlrpc.py new file mode 100644 index 0000000..4a7d55d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/network/xmlrpc.py @@ -0,0 +1,60 @@ +"""xmlrpclib.Transport implementation +""" + +import logging +import urllib.parse +import xmlrpc.client +from typing import TYPE_CHECKING, Tuple + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status + +if TYPE_CHECKING: + from xmlrpc.client import _HostType, _Marshallable + +logger = logging.getLogger(__name__) + + +class PipXmlrpcTransport(xmlrpc.client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. 
+ """ + + def __init__( + self, index_url: str, session: PipSession, use_datetime: bool = False + ) -> None: + super().__init__(use_datetime) + index_parts = urllib.parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request( + self, + host: "_HostType", + handler: str, + request_body: bytes, + verbose: bool = False, + ) -> Tuple["_Marshallable", ...]: + assert isinstance(host, str) + parts = (self._scheme, host, handler, None, None, None) + url = urllib.parse.urlunparse(parts) + try: + headers = {"Content-Type": "text/xml"} + response = self._session.post( + url, + data=request_body, + headers=headers, + stream=True, + ) + raise_for_status(response) + self.verbose = verbose + return self.parse_response(response.raw) + except NetworkConnectionError as exc: + assert exc.response + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, + url, + ) + raise diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..8a29f2b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc new file mode 100644 index 0000000..52f1982 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc new file mode 100644 index 0000000..8558f3b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc new file mode 100644 index 0000000..4e4ffd6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e48355d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc new file mode 100644 index 0000000..91ef186 Binary files /dev/null and 
b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc new file mode 100644 index 0000000..1c3fb45 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc new file mode 100644 index 0000000..bb2439e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc new file mode 100644 index 0000000..ef951a4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc new file mode 100644 index 0000000..d1b0fb2 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc new file mode 100644 index 0000000..77836eb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata.py new file mode 100644 index 0000000..7d12438 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata.py @@ -0,0 +1,30 @@ +"""Metadata generation logic for source distributions. +""" + +import os + +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_metadata(build_env: BuildEnvironment, backend: Pep517HookCaller) -> str: + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. 
+ runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") + with backend.subprocess_runner(runner): + distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_editable.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_editable.py new file mode 100644 index 0000000..13de75f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_editable.py @@ -0,0 +1,34 @@ +"""Metadata generation logic for source distributions. +""" + +import os + +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_editable_metadata( + build_env: BuildEnvironment, backend: Pep517HookCaller +) -> str: + """Generate metadata using mechanisms described in PEP 660. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel/editable, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message( + "Preparing editable metadata (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir) + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py new file mode 100644 index 0000000..ff52de9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py @@ -0,0 +1,67 @@ +"""Metadata generation logic for legacy source distributions. +""" + +import logging +import os + +from pip._internal.build_env import BuildEnvironment +from pip._internal.cli.spinners import open_spinner +from pip._internal.exceptions import InstallationError +from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +def _find_egg_info(directory: str) -> str: + """Find an .egg-info subdirectory in `directory`.""" + filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")] + + if not filenames: + raise InstallationError(f"No .egg-info directory found in {directory}") + + if len(filenames) > 1: + raise InstallationError( + "More than one .egg-info directory found in {}".format(directory) + ) + + return os.path.join(directory, filenames[0]) + + +def generate_metadata( + build_env: BuildEnvironment, + setup_py_path: str, + source_dir: str, + isolated: bool, + details: str, +) -> str: + """Generate metadata using setup.py-based defacto mechanisms. + + Returns the generated metadata directory. 
+ """ + logger.debug( + "Running setup.py (path:%s) egg_info for package %s", + setup_py_path, + details, + ) + + egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path + + args = make_setuptools_egg_info_args( + setup_py_path, + egg_info_dir=egg_info_dir, + no_user_config=isolated, + ) + + with build_env: + with open_spinner("Preparing metadata (setup.py)") as spinner: + call_subprocess( + args, + cwd=source_dir, + command_desc="python setup.py egg_info", + spinner=spinner, + ) + + # Return the .egg-info directory. + return _find_egg_info(egg_info_dir) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel.py new file mode 100644 index 0000000..b0d2fc9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel.py @@ -0,0 +1,37 @@ +import logging +import os +from typing import Optional + +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name: str, + backend: Pep517HookCaller, + metadata_directory: str, + tempd: str, +) -> Optional[str]: + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + assert metadata_directory is not None + try: + logger.debug("Destination directory: %s", tempd) + + runner = runner_with_spinner_message( + f"Building wheel for {name} (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error("Failed building wheel for %s", name) + return None + return os.path.join(tempd, wheel_name) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_editable.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_editable.py new file mode 100644 index 0000000..cf7b01a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_editable.py @@ -0,0 +1,46 @@ +import logging +import os +from typing import Optional + +from pip._vendor.pep517.wrappers import HookMissing, Pep517HookCaller + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_editable( + name: str, + backend: Pep517HookCaller, + metadata_directory: str, + tempd: str, +) -> Optional[str]: + """Build one InstallRequirement using the PEP 660 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + assert metadata_directory is not None + try: + logger.debug("Destination directory: %s", tempd) + + runner = runner_with_spinner_message( + f"Building editable for {name} (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + try: + wheel_name = backend.build_editable( + tempd, + metadata_directory=metadata_directory, + ) + except HookMissing as e: + logger.error( + "Cannot build editable %s because the build " + "backend does not have the %s hook", + name, + e, + ) + return None + except Exception: + logger.error("Failed building editable for %s", name) + return None + return os.path.join(tempd, wheel_name) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_legacy.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_legacy.py new file mode 100644 index 0000000..2d5cb26 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_legacy.py @@ -0,0 +1,105 @@ +import logging +import os.path +from typing import List, Optional + +from pip._internal.cli.spinners import open_spinner +from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args +from pip._internal.utils.subprocess import ( + LOG_DIVIDER, + call_subprocess, + format_command_args, +) + +logger = logging.getLogger(__name__) + + +def format_command_result( + command_args: List[str], + command_output: str, +) -> str: + """Format command information for logging.""" + command_desc = format_command_args(command_args) + text = f"Command arguments: {command_desc}\n" + + if not command_output: + text += "Command output: None" + elif logger.getEffectiveLevel() > logging.DEBUG: + text += "Command output: [use --verbose to show]" + else: + if not command_output.endswith("\n"): + command_output += "\n" + text += f"Command output:\n{command_output}{LOG_DIVIDER}" + + return text + + +def get_legacy_build_wheel_path( + names: List[str], + temp_dir: str, + name: str, + command_args: List[str], + command_output: str, +) -> Optional[str]: + """Return the path to the wheel in the temporary build directory.""" + # Sort for determinism. + names = sorted(names) + if not names: + msg = ("Legacy build of wheel for {!r} created no files.\n").format(name) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + return None + + if len(names) > 1: + msg = ( + "Legacy build of wheel for {!r} created more than one file.\n" + "Filenames (choosing first): {}\n" + ).format(name, names) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + + return os.path.join(temp_dir, names[0]) + + +def build_wheel_legacy( + name: str, + setup_py_path: str, + source_dir: str, + global_options: List[str], + build_options: List[str], + tempd: str, +) -> Optional[str]: + """Build one unpacked package using the "legacy" build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + wheel_args = make_setuptools_bdist_wheel_args( + setup_py_path, + global_options=global_options, + build_options=build_options, + destination_dir=tempd, + ) + + spin_message = f"Building wheel for {name} (setup.py)" + with open_spinner(spin_message) as spinner: + logger.debug("Destination directory: %s", tempd) + + try: + output = call_subprocess( + wheel_args, + cwd=source_dir, + spinner=spinner, + ) + except Exception: + spinner.finish("error") + logger.error("Failed building wheel for %s", name) + return None + + names = os.listdir(tempd) + wheel_path = get_legacy_build_wheel_path( + names=names, + temp_dir=tempd, + name=name, + command_args=wheel_args, + command_output=output, + ) + return wheel_path diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/check.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/check.py new file mode 100644 index 0000000..fb3ac8b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/check.py @@ -0,0 +1,149 @@ +"""Validation of dependencies of packages +""" + +import logging +from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.metadata import get_default_environment +from pip._internal.metadata.base import DistributionVersion +from pip._internal.req.req_install import InstallRequirement + +logger = logging.getLogger(__name__) + + +class PackageDetails(NamedTuple): + version: DistributionVersion + dependencies: List[Requirement] + + +# Shorthands +PackageSet = Dict[NormalizedName, PackageDetails] +Missing = Tuple[NormalizedName, Requirement] +Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement] + +MissingDict = Dict[NormalizedName, List[Missing]] +ConflictingDict = Dict[NormalizedName, List[Conflicting]] +CheckResult = Tuple[MissingDict, ConflictingDict] +ConflictDetails = Tuple[PackageSet, CheckResult] + + +def create_package_set_from_installed() -> Tuple[PackageSet, bool]: + """Converts a list of distributions into a PackageSet.""" + package_set = {} + problems = False + env = get_default_environment() + for dist in env.iter_installed_distributions(local_only=False, skip=()): + name = dist.canonical_name + try: + dependencies = list(dist.iter_dependencies()) + package_set[name] = PackageDetails(dist.version, dependencies) + except (OSError, ValueError) as e: + # Don't crash on unreadable or broken metadata. + logger.warning("Error parsing requirements for %s: %s", name, e) + problems = True + return package_set, problems + + +def check_package_set( + package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None +) -> CheckResult: + """Check if a package set is consistent + + If should_ignore is passed, it should be a callable that takes a + package name and returns a boolean. 
+ """ + + missing = {} + conflicting = {} + + for package_name, package_detail in package_set.items(): + # Info about dependencies of package_name + missing_deps: Set[Missing] = set() + conflicting_deps: Set[Conflicting] = set() + + if should_ignore and should_ignore(package_name): + continue + + for req in package_detail.dependencies: + name = canonicalize_name(req.name) + + # Check if it's missing + if name not in package_set: + missed = True + if req.marker is not None: + missed = req.marker.evaluate() + if missed: + missing_deps.add((name, req)) + continue + + # Check if there's a conflict + version = package_set[name].version + if not req.specifier.contains(version, prereleases=True): + conflicting_deps.add((name, version, req)) + + if missing_deps: + missing[package_name] = sorted(missing_deps, key=str) + if conflicting_deps: + conflicting[package_name] = sorted(conflicting_deps, key=str) + + return missing, conflicting + + +def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails: + """For checking if the dependency graph would be consistent after \ + installing given requirements + """ + # Start from the current state + package_set, _ = create_package_set_from_installed() + # Install packages + would_be_installed = _simulate_installation_of(to_install, package_set) + + # Only warn about directly-dependent packages; create a whitelist of them + whitelist = _create_whitelist(would_be_installed, package_set) + + return ( + package_set, + check_package_set( + package_set, should_ignore=lambda name: name not in whitelist + ), + ) + + +def _simulate_installation_of( + to_install: List[InstallRequirement], package_set: PackageSet +) -> Set[NormalizedName]: + """Computes the version of packages after installing to_install.""" + # Keep track of packages that were installed + installed = set() + + # Modify it as installing requirement_set would (assuming no errors) + for inst_req in to_install: + abstract_dist = make_distribution_for_install_requirement(inst_req) + dist = abstract_dist.get_metadata_distribution() + name = dist.canonical_name + package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies())) + + installed.add(name) + + return installed + + +def _create_whitelist( + would_be_installed: Set[NormalizedName], package_set: PackageSet +) -> Set[NormalizedName]: + packages_affected = set(would_be_installed) + + for package_name in package_set: + if package_name in packages_affected: + continue + + for req in package_set[package_name].dependencies: + if canonicalize_name(req.name) in packages_affected: + packages_affected.add(package_name) + break + + return packages_affected diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/freeze.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/freeze.py new file mode 100644 index 0000000..4565540 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/freeze.py @@ -0,0 +1,254 @@ +import collections +import logging +import os +from typing import Container, Dict, Iterable, Iterator, List, NamedTuple, Optional, Set + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.metadata import BaseDistribution, get_environment +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_file import COMMENT_RE +from 
pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference + +logger = logging.getLogger(__name__) + + +class _EditableInfo(NamedTuple): + requirement: str + comments: List[str] + + +def freeze( + requirement: Optional[List[str]] = None, + local_only: bool = False, + user_only: bool = False, + paths: Optional[List[str]] = None, + isolated: bool = False, + exclude_editable: bool = False, + skip: Container[str] = (), +) -> Iterator[str]: + installations: Dict[str, FrozenRequirement] = {} + + dists = get_environment(paths).iter_installed_distributions( + local_only=local_only, + skip=(), + user_only=user_only, + ) + for dist in dists: + req = FrozenRequirement.from_dist(dist) + if exclude_editable and req.editable: + continue + installations[req.canonical_name] = req + + if requirement: + # the options that don't get turned into an InstallRequirement + # should only be emitted once, even if the same option is in multiple + # requirements files, so we need to keep track of what has been emitted + # so that we don't emit it again if it's seen again + emitted_options: Set[str] = set() + # keep track of which files a requirement is in so that we can + # give an accurate warning if a requirement appears multiple times. + req_files: Dict[str, List[str]] = collections.defaultdict(list) + for req_file_path in requirement: + with open(req_file_path) as req_file: + for line in req_file: + if ( + not line.strip() + or line.strip().startswith("#") + or line.startswith( + ( + "-r", + "--requirement", + "-f", + "--find-links", + "-i", + "--index-url", + "--pre", + "--trusted-host", + "--process-dependency-links", + "--extra-index-url", + "--use-feature", + ) + ) + ): + line = line.rstrip() + if line not in emitted_options: + emitted_options.add(line) + yield line + continue + + if line.startswith("-e") or line.startswith("--editable"): + if line.startswith("-e"): + line = line[2:].strip() + else: + line = line[len("--editable") :].strip().lstrip("=") + line_req = install_req_from_editable( + line, + isolated=isolated, + ) + else: + line_req = install_req_from_line( + COMMENT_RE.sub("", line).strip(), + isolated=isolated, + ) + + if not line_req.name: + logger.info( + "Skipping line in requirement file [%s] because " + "it's not clear what it would install: %s", + req_file_path, + line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + else: + line_req_canonical_name = canonicalize_name(line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub("", line).strip(), + line_req.name, + ) + else: + req_files[line_req.name].append(req_file_path) + else: + yield str(installations[line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] + req_files[line_req.name].append(req_file_path) + + # Warn about requirements that were included multiple times (in a + # single requirements file or in different requirements files). 
+ for name, files in req_files.items(): + if len(files) > 1: + logger.warning( + "Requirement %s included multiple times [%s]", + name, + ", ".join(sorted(set(files))), + ) + + yield ("## The following requirements were added by pip freeze:") + for installation in sorted(installations.values(), key=lambda x: x.name.lower()): + if installation.canonical_name not in skip: + yield str(installation).rstrip() + + +def _format_as_name_version(dist: BaseDistribution) -> str: + if isinstance(dist.version, Version): + return f"{dist.raw_name}=={dist.version}" + return f"{dist.raw_name}==={dist.version}" + + +def _get_editable_info(dist: BaseDistribution) -> _EditableInfo: + """ + Compute and return values (req, comments) for use in + FrozenRequirement.from_dist(). + """ + editable_project_location = dist.editable_project_location + assert editable_project_location + location = os.path.normcase(os.path.abspath(editable_project_location)) + + from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs + + vcs_backend = vcs.get_backend_for_dir(location) + + if vcs_backend is None: + display = _format_as_name_version(dist) + logger.debug( + 'No VCS found for editable requirement "%s" in: %r', + display, + location, + ) + return _EditableInfo( + requirement=location, + comments=[f"# Editable install with no version control ({display})"], + ) + + vcs_name = type(vcs_backend).__name__ + + try: + req = vcs_backend.get_src_requirement(location, dist.raw_name) + except RemoteNotFoundError: + display = _format_as_name_version(dist) + return _EditableInfo( + requirement=location, + comments=[f"# Editable {vcs_name} install with no remote ({display})"], + ) + except RemoteNotValidError as ex: + display = _format_as_name_version(dist) + return _EditableInfo( + requirement=location, + comments=[ + f"# Editable {vcs_name} install ({display}) with either a deleted " + f"local remote or invalid URI:", + f"# '{ex.url}'", + ], + ) + except BadCommand: + logger.warning( + "cannot determine version of editable source in %s " + "(%s command not found in path)", + location, + vcs_backend.name, + ) + return _EditableInfo(requirement=location, comments=[]) + except InstallationError as exc: + logger.warning("Error when trying to get requirement for VCS system %s", exc) + else: + return _EditableInfo(requirement=req, comments=[]) + + logger.warning("Could not determine repository location of %s", location) + + return _EditableInfo( + requirement=location, + comments=["## !! 
Could not determine repository location"], + ) + + +class FrozenRequirement: + def __init__( + self, + name: str, + req: str, + editable: bool, + comments: Iterable[str] = (), + ) -> None: + self.name = name + self.canonical_name = canonicalize_name(name) + self.req = req + self.editable = editable + self.comments = comments + + @classmethod + def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement": + editable = dist.editable + if editable: + req, comments = _get_editable_info(dist) + else: + comments = [] + direct_url = dist.direct_url + if direct_url: + # if PEP 610 metadata is present, use it + req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name) + else: + # name==version requirement + req = _format_as_name_version(dist) + + return cls(dist.raw_name, req, editable, comments=comments) + + def __str__(self) -> str: + req = self.req + if self.editable: + req = f"-e {req}" + return "\n".join(list(self.comments) + [str(req)]) + "\n" diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__init__.py new file mode 100644 index 0000000..24d6a5d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__init__.py @@ -0,0 +1,2 @@ +"""For modules related to installing packages. +""" diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b05d37c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc new file mode 100644 index 0000000..839d85c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc new file mode 100644 index 0000000..5c04afd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc new file mode 100644 index 0000000..8bc24d5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/editable_legacy.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/editable_legacy.py new file mode 100644 index 0000000..5bd72ca --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/editable_legacy.py @@ -0,0 +1,46 @@ +"""Legacy editable installation process, i.e. `setup.py develop`. 
+""" +import logging +from typing import List, Optional, Sequence + +from pip._internal.build_env import BuildEnvironment +from pip._internal.utils.logging import indent_log +from pip._internal.utils.setuptools_build import make_setuptools_develop_args +from pip._internal.utils.subprocess import call_subprocess + +logger = logging.getLogger(__name__) + + +def install_editable( + install_options: List[str], + global_options: Sequence[str], + prefix: Optional[str], + home: Optional[str], + use_user_site: bool, + name: str, + setup_py_path: str, + isolated: bool, + build_env: BuildEnvironment, + unpacked_source_directory: str, +) -> None: + """Install a package in editable mode. Most arguments are pass-through + to setuptools. + """ + logger.info("Running setup.py develop for %s", name) + + args = make_setuptools_develop_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + no_user_config=isolated, + prefix=prefix, + home=home, + use_user_site=use_user_site, + ) + + with indent_log(): + with build_env: + call_subprocess( + args, + cwd=unpacked_source_directory, + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/legacy.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/legacy.py new file mode 100644 index 0000000..2206c93 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/legacy.py @@ -0,0 +1,125 @@ +"""Legacy installation process, i.e. `setup.py install`. +""" + +import logging +import os +from distutils.util import change_root +from typing import List, Optional, Sequence + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import InstallationError +from pip._internal.models.scheme import Scheme +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.setuptools_build import make_setuptools_install_args +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class LegacyInstallFailure(Exception): + pass + + +def write_installed_files_from_setuptools_record( + record_lines: List[str], + root: Optional[str], + req_description: str, +) -> None: + def prepend_root(path: str) -> str: + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + for line in record_lines: + directory = os.path.dirname(line) + if directory.endswith(".egg-info"): + egg_info_dir = prepend_root(directory) + break + else: + message = ( + "{} did not indicate that it installed an " + ".egg-info directory. Only setup.py projects " + "generating .egg-info directories are supported." 
+ ).format(req_description) + raise InstallationError(message) + + new_lines = [] + for line in record_lines: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir)) + new_lines.sort() + ensure_dir(egg_info_dir) + inst_files_path = os.path.join(egg_info_dir, "installed-files.txt") + with open(inst_files_path, "w") as f: + f.write("\n".join(new_lines) + "\n") + + +def install( + install_options: List[str], + global_options: Sequence[str], + root: Optional[str], + home: Optional[str], + prefix: Optional[str], + use_user_site: bool, + pycompile: bool, + scheme: Scheme, + setup_py_path: str, + isolated: bool, + req_name: str, + build_env: BuildEnvironment, + unpacked_source_directory: str, + req_description: str, +) -> bool: + + header_dir = scheme.headers + + with TempDirectory(kind="record") as temp_dir: + try: + record_filename = os.path.join(temp_dir.path, "install-record.txt") + install_args = make_setuptools_install_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + record_filename=record_filename, + root=root, + prefix=prefix, + header_dir=header_dir, + home=home, + use_user_site=use_user_site, + no_user_config=isolated, + pycompile=pycompile, + ) + + runner = runner_with_spinner_message( + f"Running setup.py install for {req_name}" + ) + with indent_log(), build_env: + runner( + cmd=install_args, + cwd=unpacked_source_directory, + ) + + if not os.path.exists(record_filename): + logger.debug("Record file %s not found", record_filename) + # Signal to the caller that we didn't install the new package + return False + + except Exception as e: + # Signal to the caller that we didn't install the new package + raise LegacyInstallFailure from e + + # At this point, we have successfully installed the requirement. + + # We intentionally do not use any encoding to read the file because + # setuptools writes the file using distutils.file_util.write_file, + # which does not specify an encoding. + with open(record_filename) as f: + record_lines = f.read().splitlines() + + write_installed_files_from_setuptools_record(record_lines, root, req_description) + return True diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/wheel.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/wheel.py new file mode 100644 index 0000000..e191b13 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/install/wheel.py @@ -0,0 +1,738 @@ +"""Support for installing and building the "wheel" binary package format. 
+""" + +import collections +import compileall +import contextlib +import csv +import importlib +import logging +import os.path +import re +import shutil +import sys +import warnings +from base64 import urlsafe_b64encode +from email.message import Message +from itertools import chain, filterfalse, starmap +from typing import ( + IO, + TYPE_CHECKING, + Any, + BinaryIO, + Callable, + Dict, + Iterable, + Iterator, + List, + NewType, + Optional, + Sequence, + Set, + Tuple, + Union, + cast, +) +from zipfile import ZipFile, ZipInfo + +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.distlib.util import get_export_entry +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_major_minor_version +from pip._internal.metadata import ( + BaseDistribution, + FilesystemWheel, + get_wheel_distribution, +) +from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition +from pip._internal.utils.unpacking import ( + current_umask, + is_within_directory, + set_extracted_file_to_default_mode_plus_executable, + zip_item_is_executable, +) +from pip._internal.utils.wheel import parse_wheel + +if TYPE_CHECKING: + from typing import Protocol + + class File(Protocol): + src_record_path: "RecordPath" + dest_path: str + changed: bool + + def save(self) -> None: + pass + + +logger = logging.getLogger(__name__) + +RecordPath = NewType("RecordPath", str) +InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]] + + +def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]: + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=") + return (digest, str(length)) + + +def csv_io_kwargs(mode: str) -> Dict[str, Any]: + """Return keyword arguments to properly open a CSV file + in the given mode. + """ + return {"mode": mode, "newline": "", "encoding": "utf-8"} + + +def fix_script(path: str) -> bool: + """Replace #!python with #!/path/to/python + Return True if file was changed. + """ + # XXX RECORD hashes will need to be updated + assert os.path.isfile(path) + + with open(path, "rb") as script: + firstline = script.readline() + if not firstline.startswith(b"#!python"): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b"#!" + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, "wb") as script: + script.write(firstline) + script.write(rest) + return True + + +def wheel_root_is_purelib(metadata: Message) -> bool: + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]: + console_scripts = {} + gui_scripts = {} + for entry_point in dist.iter_entry_points(): + if entry_point.group == "console_scripts": + console_scripts[entry_point.name] = entry_point.value + elif entry_point.group == "gui_scripts": + gui_scripts[entry_point.name] = entry_point.value + return console_scripts, gui_scripts + + +def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]: + """Determine if any scripts are not on PATH and format a warning. 
+ Returns a warning message if one or more scripts are not on PATH, + otherwise None. + """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set) + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(i).rstrip(os.sep) + for i in os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + warn_for: Dict[str, Set[str]] = { + parent_dir: scripts + for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(parent_dir) not in not_warn_dirs + } + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts: List[str] = sorted(dir_scripts) + if len(sorted_scripts) == 1: + start_text = "script {} is".format(sorted_scripts[0]) + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + "The {} installed in '{}' which is not on PATH.".format( + start_text, parent_dir + ) + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." + ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." + ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def _normalized_outrows( + outrows: Iterable[InstalledCSVRow], +) -> List[Tuple[str, str, str]]: + """Normalize the given rows of a RECORD file. + + Items in each row are converted into str. Rows are then sorted to make + the value more predictable for tests. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. + """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. 
+ # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted( + (record_path, hash_, str(size)) for record_path, hash_, size in outrows + ) + + +def _record_to_fs_path(record_path: RecordPath) -> str: + return record_path + + +def _fs_to_record_path(path: str, relative_to: Optional[str] = None) -> RecordPath: + if relative_to is not None: + # On Windows, do not handle relative paths if they belong to different + # logical disks + if ( + os.path.splitdrive(path)[0].lower() + == os.path.splitdrive(relative_to)[0].lower() + ): + path = os.path.relpath(path, relative_to) + path = path.replace(os.path.sep, "/") + return cast("RecordPath", path) + + +def get_csv_rows_for_installed( + old_csv_rows: List[List[str]], + installed: Dict[RecordPath, RecordPath], + changed: Set[RecordPath], + generated: List[str], + lib_dir: str, +) -> List[InstalledCSVRow]: + """ + :param installed: A map from archive RECORD path to installation RECORD + path. + """ + installed_rows: List[InstalledCSVRow] = [] + for row in old_csv_rows: + if len(row) > 3: + logger.warning("RECORD line has more than three elements: %s", row) + old_record_path = cast("RecordPath", row[0]) + new_record_path = installed.pop(old_record_path, old_record_path) + if new_record_path in changed: + digest, length = rehash(_record_to_fs_path(new_record_path)) + else: + digest = row[1] if len(row) > 1 else "" + length = row[2] if len(row) > 2 else "" + installed_rows.append((new_record_path, digest, length)) + for f in generated: + path = _fs_to_record_path(f, lib_dir) + digest, length = rehash(f) + installed_rows.append((path, digest, length)) + for installed_record_path in installed.values(): + installed_rows.append((installed_record_path, "", "")) + return installed_rows + + +def get_console_script_specs(console: Dict[str, str]) -> List[str]: + """ + Given the mapping from entrypoint name to callable, return the relevant + console script specs. + """ + # Don't mutate caller's version + console = console.copy() + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. 
+ # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. + pip_script = console.pop("pip", None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append("pip = " + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append( + "pip{} = {}".format(sys.version_info[0], pip_script) + ) + + scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r"pip(\d(\.\d)?)?$", k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop("easy_install", None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append("easy_install = " + easy_install_script) + + scripts_to_generate.append( + "easy_install-{} = {}".format( + get_major_minor_version(), easy_install_script + ) + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r"easy_install(-\d\.\d)?$", k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console entry points specified in the wheel + scripts_to_generate.extend(starmap("{} = {}".format, console.items())) + + return scripts_to_generate + + +class ZipBackedFile: + def __init__( + self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile + ) -> None: + self.src_record_path = src_record_path + self.dest_path = dest_path + self._zip_file = zip_file + self.changed = False + + def _getinfo(self) -> ZipInfo: + return self._zip_file.getinfo(self.src_record_path) + + def save(self) -> None: + # directory creation is lazy and after file filtering + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + parent_dir = os.path.dirname(self.dest_path) + ensure_dir(parent_dir) + + # When we open the output file below, any existing file is truncated + # before we start writing the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. + if os.path.exists(self.dest_path): + os.unlink(self.dest_path) + + zipinfo = self._getinfo() + + with self._zip_file.open(zipinfo) as f: + with open(self.dest_path, "wb") as dest: + shutil.copyfileobj(f, dest) + + if zip_item_is_executable(zipinfo): + set_extracted_file_to_default_mode_plus_executable(self.dest_path) + + +class ScriptFile: + def __init__(self, file: "File") -> None: + self._file = file + self.src_record_path = self._file.src_record_path + self.dest_path = self._file.dest_path + self.changed = False + + def save(self) -> None: + self._file.save() + self.changed = fix_script(self.dest_path) + + +class MissingCallableSuffix(InstallationError): + def __init__(self, entry_point: str) -> None: + super().__init__( + "Invalid script entry point: {} - A callable " + "suffix is required. 
Cf https://packaging.python.org/" + "specifications/entry-points/#use-for-scripts for more " + "information.".format(entry_point) + ) + + +def _raise_for_invalid_entrypoint(specification: str) -> None: + entry = get_export_entry(specification) + if entry is not None and entry.suffix is None: + raise MissingCallableSuffix(str(entry)) + + +class PipScriptMaker(ScriptMaker): + def make(self, specification: str, options: Dict[str, Any] = None) -> List[str]: + _raise_for_invalid_entrypoint(specification) + return super().make(specification, options) + + +def _install_wheel( + name: str, + wheel_zip: ZipFile, + wheel_path: str, + scheme: Scheme, + pycompile: bool = True, + warn_script_location: bool = True, + direct_url: Optional[DirectUrl] = None, + requested: bool = False, +) -> None: + """Install a wheel. + + :param name: Name of the project to install + :param wheel_zip: open ZipFile for wheel being installed + :param scheme: Distutils scheme dictating the install directories + :param req_description: String used in place of the requirement, for + logging + :param pycompile: Whether to byte-compile installed Python files + :param warn_script_location: Whether to check that scripts are installed + into a directory on PATH + :raises UnsupportedWheel: + * when the directory holds an unpacked wheel with incompatible + Wheel-Version + * when the .dist-info dir does not match the wheel + """ + info_dir, metadata = parse_wheel(wheel_zip, name) + + if wheel_root_is_purelib(metadata): + lib_dir = scheme.purelib + else: + lib_dir = scheme.platlib + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed: Dict[RecordPath, RecordPath] = {} + changed: Set[RecordPath] = set() + generated: List[str] = [] + + def record_installed( + srcfile: RecordPath, destfile: str, modified: bool = False + ) -> None: + """Map archive RECORD paths to installation RECORD paths.""" + newpath = _fs_to_record_path(destfile, lib_dir) + installed[srcfile] = newpath + if modified: + changed.add(_fs_to_record_path(destfile)) + + def is_dir_path(path: RecordPath) -> bool: + return path.endswith("/") + + def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None: + if not is_within_directory(dest_dir_path, target_path): + message = ( + "The wheel {!r} has a file {!r} trying to install" + " outside the target directory {!r}" + ) + raise InstallationError( + message.format(wheel_path, target_path, dest_dir_path) + ) + + def root_scheme_file_maker( + zip_file: ZipFile, dest: str + ) -> Callable[[RecordPath], "File"]: + def make_root_scheme_file(record_path: RecordPath) -> "File": + normed_path = os.path.normpath(record_path) + dest_path = os.path.join(dest, normed_path) + assert_no_path_traversal(dest, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_root_scheme_file + + def data_scheme_file_maker( + zip_file: ZipFile, scheme: Scheme + ) -> Callable[[RecordPath], "File"]: + scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS} + + def make_data_scheme_file(record_path: RecordPath) -> "File": + normed_path = os.path.normpath(record_path) + try: + _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2) + except ValueError: + message = ( + "Unexpected file in {}: {!r}. .data directory contents" + " should be named like: '/'." 
+ ).format(wheel_path, record_path) + raise InstallationError(message) + + try: + scheme_path = scheme_paths[scheme_key] + except KeyError: + valid_scheme_keys = ", ".join(sorted(scheme_paths)) + message = ( + "Unknown scheme key used in {}: {} (for file {!r}). .data" + " directory contents should be in subdirectories named" + " with a valid scheme key ({})" + ).format(wheel_path, scheme_key, record_path, valid_scheme_keys) + raise InstallationError(message) + + dest_path = os.path.join(scheme_path, dest_subpath) + assert_no_path_traversal(scheme_path, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_data_scheme_file + + def is_data_scheme_path(path: RecordPath) -> bool: + return path.split("/", 1)[0].endswith(".data") + + paths = cast(List[RecordPath], wheel_zip.namelist()) + file_paths = filterfalse(is_dir_path, paths) + root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths) + + make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir) + files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths) + + def is_script_scheme_path(path: RecordPath) -> bool: + parts = path.split("/", 2) + return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts" + + other_scheme_paths, script_scheme_paths = partition( + is_script_scheme_path, data_scheme_paths + ) + + make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) + other_scheme_files = map(make_data_scheme_file, other_scheme_paths) + files = chain(files, other_scheme_files) + + # Get the defined entry points + distribution = get_wheel_distribution( + FilesystemWheel(wheel_path), + canonicalize_name(name), + ) + console, gui = get_entrypoints(distribution) + + def is_entrypoint_wrapper(file: "File") -> bool: + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + path = file.dest_path + name = os.path.basename(path) + if name.lower().endswith(".exe"): + matchname = name[:-4] + elif name.lower().endswith("-script.py"): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return matchname in console or matchname in gui + + script_scheme_files: Iterator[File] = map( + make_data_scheme_file, script_scheme_paths + ) + script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files) + script_scheme_files = map(ScriptFile, script_scheme_files) + files = chain(files, script_scheme_files) + + for file in files: + file.save() + record_installed(file.src_record_path, file.dest_path, file.changed) + + def pyc_source_file_paths() -> Iterator[str]: + # We de-duplicate installation paths, since there can be overlap (e.g. + # file in .data maps to same location as file in wheel root). + # Sorting installation paths makes it easier to reproduce and debug + # issues related to permissions on existing files. 
+ for installed_path in sorted(set(installed.values())): + full_installed_path = os.path.join(lib_dir, installed_path) + if not os.path.isfile(full_installed_path): + continue + if not full_installed_path.endswith(".py"): + continue + yield full_installed_path + + def pyc_output_path(path: str) -> str: + """Return the path the pyc file would have been written to.""" + return importlib.util.cache_from_source(path) + + # Compile all of the pyc files for the installed files + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + for path in pyc_source_file_paths(): + success = compileall.compile_file(path, force=True, quiet=True) + if success: + pyc_path = pyc_output_path(path) + assert os.path.exists(pyc_path) + pyc_record_path = cast( + "RecordPath", pyc_path.replace(os.path.sep, "/") + ) + record_installed(pyc_record_path, pyc_path) + logger.debug(stdout.getvalue()) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {""} + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate = get_console_script_specs(console) + + gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items())) + + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True})) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + generated_file_mode = 0o666 & ~current_umask() + + @contextlib.contextmanager + def _generate_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: + with adjacent_tmp_file(path, **kwargs) as f: + yield f + os.chmod(f.name, generated_file_mode) + replace(f.name, path) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Record pip as the installer + installer_path = os.path.join(dest_info_dir, "INSTALLER") + with _generate_file(installer_path) as installer_file: + installer_file.write(b"pip\n") + generated.append(installer_path) + + # Record the PEP 610 direct URL reference + if direct_url is not None: + direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) + with _generate_file(direct_url_path) as direct_url_file: + direct_url_file.write(direct_url.to_json().encode("utf-8")) + generated.append(direct_url_path) + + # Record the REQUESTED file + if requested: + requested_path = os.path.join(dest_info_dir, "REQUESTED") + with open(requested_path, "wb"): + pass + generated.append(requested_path) + + record_text = distribution.read_text("RECORD") + record_rows = list(csv.reader(record_text.splitlines())) + + rows = get_csv_rows_for_installed( + record_rows, + installed=installed, + changed=changed, + generated=generated, + lib_dir=lib_dir, + ) + + # Record details of all files installed + record_path = os.path.join(dest_info_dir, "RECORD") + + with _generate_file(record_path, **csv_io_kwargs("w")) as record_file: + # Explicitly cast to typing.IO[str] as a workaround for the mypy error: + # 
"writer" has incompatible type "BinaryIO"; expected "_Writer" + writer = csv.writer(cast("IO[str]", record_file)) + writer.writerows(_normalized_outrows(rows)) + + +@contextlib.contextmanager +def req_error_context(req_description: str) -> Iterator[None]: + try: + yield + except InstallationError as e: + message = "For req: {}. {}".format(req_description, e.args[0]) + raise InstallationError(message) from e + + +def install_wheel( + name: str, + wheel_path: str, + scheme: Scheme, + req_description: str, + pycompile: bool = True, + warn_script_location: bool = True, + direct_url: Optional[DirectUrl] = None, + requested: bool = False, +) -> None: + with ZipFile(wheel_path, allowZip64=True) as z: + with req_error_context(req_description): + _install_wheel( + name=name, + wheel_zip=z, + wheel_path=wheel_path, + scheme=scheme, + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=requested, + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/operations/prepare.py b/.venv/lib/python3.9/site-packages/pip/_internal/operations/prepare.py new file mode 100644 index 0000000..34cf9a5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/operations/prepare.py @@ -0,0 +1,632 @@ +"""Prepares a distribution for installation +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import mimetypes +import os +import shutil +from typing import Dict, Iterable, List, Optional + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.distributions.installed import InstalledDistribution +from pip._internal.exceptions import ( + DirectoryUrlHashUnsupported, + HashMismatch, + HashUnpinned, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + VcsHashUnsupported, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.network.download import BatchDownloader, Downloader +from pip._internal.network.lazy_wheel import ( + HTTPRangeRequestUnsupported, + dist_from_wheel_url, +) +from pip._internal.network.session import PipSession +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.utils.filesystem import copy2_fixed +from pip._internal.utils.hashes import Hashes, MissingHashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import display_path, hide_url, is_installable_dir, rmtree +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.unpacking import unpack_file +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +def _get_prepared_distribution( + req: InstallRequirement, + req_tracker: RequirementTracker, + finder: PackageFinder, + build_isolation: bool, +) -> BaseDistribution: + """Prepare a distribution for installation.""" + abstract_dist = make_distribution_for_install_requirement(req) + with req_tracker.track(req): + abstract_dist.prepare_distribution_metadata(finder, build_isolation) + return abstract_dist.get_metadata_distribution() + + +def unpack_vcs_link(link: Link, location: str) -> None: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend is not None + 
vcs_backend.unpack(location, url=hide_url(link.url)) + + +class File: + def __init__(self, path: str, content_type: Optional[str]) -> None: + self.path = path + if content_type is None: + self.content_type = mimetypes.guess_type(path)[0] + else: + self.content_type = content_type + + +def get_http_url( + link: Link, + download: Downloader, + download_dir: Optional[str] = None, + hashes: Optional[Hashes] = None, +) -> File: + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, download_dir, hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = None + else: + # let's download to a tmp dir + from_path, content_type = download(link, temp_dir.path) + if hashes: + hashes.check_against_path(from_path) + + return File(from_path, content_type) + + +def _copy2_ignoring_special_files(src: str, dest: str) -> None: + """Copying special files is not supported, but as a convenience to users + we skip errors copying them. This supports tools that may create e.g. + socket files in the project source directory. + """ + try: + copy2_fixed(src, dest) + except shutil.SpecialFileError as e: + # SpecialFileError may be raised due to either the source or + # destination. If the destination was the cause then we would actually + # care, but since the destination directory is deleted prior to + # copy we ignore all of them assuming it is caused by the source. + logger.warning( + "Ignoring special file error '%s' encountered copying %s to %s.", + str(e), + src, + dest, + ) + + +def _copy_source_tree(source: str, target: str) -> None: + target_abspath = os.path.abspath(target) + target_basename = os.path.basename(target_abspath) + target_dirname = os.path.dirname(target_abspath) + + def ignore(d: str, names: List[str]) -> List[str]: + skipped: List[str] = [] + if d == source: + # Pulling in those directories can potentially be very slow, + # exclude the following directories if they appear in the top + # level dir (and only it). + # See discussion at https://github.com/pypa/pip/pull/6770 + skipped += [".tox", ".nox"] + if os.path.abspath(d) == target_dirname: + # Prevent an infinite recursion if the target is in source. + # This can happen when TMPDIR is set to ${PWD}/... + # and we copy PWD to TMPDIR. + skipped += [target_basename] + return skipped + + shutil.copytree( + source, + target, + ignore=ignore, + symlinks=True, + copy_function=_copy2_ignoring_special_files, + ) + + +def get_file_url( + link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None +) -> File: + """Get file and optionally check its hash.""" + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, download_dir, hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link.file_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. 
+ if hashes: + hashes.check_against_path(from_path) + return File(from_path, None) + + +def unpack_url( + link: Link, + location: str, + download: Downloader, + download_dir: Optional[str] = None, + hashes: Optional[Hashes] = None, +) -> Optional[File]: + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location) + return None + + # Once out-of-tree-builds are no longer supported, could potentially + # replace the below condition with `assert not link.is_existing_dir` + # - unpack_url does not need to be called for in-tree-builds. + # + # As further cleanup, _copy_source_tree and accompanying tests can + # be removed. + # + # TODO when use-deprecated=out-of-tree-build is removed + if link.is_existing_dir(): + if os.path.isdir(location): + rmtree(location) + _copy_source_tree(link.file_path, location) + return None + + # file urls + if link.is_file: + file = get_file_url(link, download_dir, hashes=hashes) + + # http urls + else: + file = get_http_url( + link, + download, + download_dir, + hashes=hashes, + ) + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies, except wheels + if not link.is_wheel: + unpack_file(file.path, location, file.content_type) + + return file + + +def _check_download_dir( + link: Link, download_dir: str, hashes: Optional[Hashes] +) -> Optional[str]: + """Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info("File was already downloaded %s", download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + "Previously-downloaded file %s has bad hash. Re-downloading.", + download_path, + ) + os.unlink(download_path) + return None + return download_path + + +class RequirementPreparer: + """Prepares a Requirement""" + + def __init__( + self, + build_dir: str, + download_dir: Optional[str], + src_dir: str, + build_isolation: bool, + req_tracker: RequirementTracker, + session: PipSession, + progress_bar: str, + finder: PackageFinder, + require_hashes: bool, + use_user_site: bool, + lazy_wheel: bool, + in_tree_build: bool, + ) -> None: + super().__init__() + + self.src_dir = src_dir + self.build_dir = build_dir + self.req_tracker = req_tracker + self._session = session + self._download = Downloader(session, progress_bar) + self._batch_download = BatchDownloader(session, progress_bar) + self.finder = finder + + # Where still-packed archives should be written to. If None, they are + # not saved, and are deleted immediately after unpacking. + self.download_dir = download_dir + + # Is build isolation allowed? + self.build_isolation = build_isolation + + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? + self.use_user_site = use_user_site + + # Should wheels be downloaded lazily? + self.use_lazy_wheel = lazy_wheel + + # Should in-tree builds be used for local paths? 
+ self.in_tree_build = in_tree_build + + # Memoized downloaded files, as mapping of url: path. + self._downloaded: Dict[str, str] = {} + + # Previous "header" printed for a link-based InstallRequirement + self._previous_requirement_header = ("", "") + + def _log_preparing_link(self, req: InstallRequirement) -> None: + """Provide context for the requirement being prepared.""" + if req.link.is_file and not req.original_link_is_in_wheel_cache: + message = "Processing %s" + information = str(display_path(req.link.file_path)) + else: + message = "Collecting %s" + information = str(req.req or req) + + if (message, information) != self._previous_requirement_header: + self._previous_requirement_header = (message, information) + logger.info(message, information) + + if req.original_link_is_in_wheel_cache: + with indent_log(): + logger.info("Using cached %s", req.link.filename) + + def _ensure_link_req_src_dir( + self, req: InstallRequirement, parallel_builds: bool + ) -> None: + """Ensure source_dir of a linked InstallRequirement.""" + # Since source_dir is only set for editable requirements. + if req.link.is_wheel: + # We don't need to unpack wheels, so no need for a source + # directory. + return + assert req.source_dir is None + if req.link.is_existing_dir() and self.in_tree_build: + # build local directories in-tree + req.source_dir = req.link.file_path + return + + # We always delete unpacked sdists after pip runs. + req.ensure_has_source_dir( + self.build_dir, + autodelete=True, + parallel_builds=parallel_builds, + ) + + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req.source_dir` + # TODO: this check is now probably dead code + if is_installable_dir(req.source_dir): + raise PreviousBuildDirError( + "pip can't proceed with requirements '{}' due to a" + "pre-existing build directory ({}). This is likely " + "due to a previous installation that failed . pip is " + "being responsible and not assuming it can delete this. " + "Please delete it and try again.".format(req, req.source_dir) + ) + + def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: + # By the time this is called, the requirement's link should have + # been checked so we can tell what kind of requirements req is + # and raise some more informative errors than otherwise. + # (For example, we can raise VcsHashUnsupported for a VCS URL + # rather than HashMissing.) + if not self.require_hashes: + return req.hashes(trust_internet=True) + + # We could check these first 2 conditions inside unpack_url + # and save repetition of conditions, but then we would + # report less-useful error messages for unhashable + # requirements, complaining that there's no hash provided. + if req.link.is_vcs: + raise VcsHashUnsupported() + if req.link.is_existing_dir(): + raise DirectoryUrlHashUnsupported() + + # Unpinned packages are asking for trouble when a new version + # is uploaded. This isn't a security check, but it saves users + # a surprising hash mismatch in the future. + # file:/// URLs aren't pinnable, so don't complain about them + # not being pinned. + if req.original_link is None and not req.is_pinned: + raise HashUnpinned() + + # If known-good hashes are missing for this requirement, + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. 
+ return req.hashes(trust_internet=False) or MissingHashes() + + def _fetch_metadata_using_lazy_wheel( + self, + link: Link, + ) -> Optional[BaseDistribution]: + """Fetch metadata using lazy wheel, if possible.""" + if not self.use_lazy_wheel: + return None + if self.require_hashes: + logger.debug("Lazy wheel is not used as hash checking is required") + return None + if link.is_file or not link.is_wheel: + logger.debug( + "Lazy wheel is not used as %r does not points to a remote wheel", + link, + ) + return None + + wheel = Wheel(link.filename) + name = canonicalize_name(wheel.name) + logger.info( + "Obtaining dependency information from %s %s", + name, + wheel.version, + ) + url = link.url.split("#", 1)[0] + try: + return dist_from_wheel_url(name, url, self._session) + except HTTPRangeRequestUnsupported: + logger.debug("%s does not support range requests", url) + return None + + def _complete_partial_requirements( + self, + partially_downloaded_reqs: Iterable[InstallRequirement], + parallel_builds: bool = False, + ) -> None: + """Download any requirements which were only fetched by metadata.""" + # Download to a temporary directory. These will be copied over as + # needed for downstream 'download', 'wheel', and 'install' commands. + temp_dir = TempDirectory(kind="unpack", globally_managed=True).path + + # Map each link to the requirement that owns it. This allows us to set + # `req.local_file_path` on the appropriate requirement after passing + # all the links at once into BatchDownloader. + links_to_fully_download: Dict[Link, InstallRequirement] = {} + for req in partially_downloaded_reqs: + assert req.link + links_to_fully_download[req.link] = req + + batch_download = self._batch_download( + links_to_fully_download.keys(), + temp_dir, + ) + for link, (filepath, _) in batch_download: + logger.debug("Downloading link %s to %s", link, filepath) + req = links_to_fully_download[link] + req.local_file_path = filepath + + # This step is necessary to ensure all lazy wheels are processed + # successfully by the 'download', 'wheel', and 'install' commands. + for req in partially_downloaded_reqs: + self._prepare_linked_requirement(req, parallel_builds) + + def prepare_linked_requirement( + self, req: InstallRequirement, parallel_builds: bool = False + ) -> BaseDistribution: + """Prepare a requirement to be obtained from req.link.""" + assert req.link + link = req.link + self._log_preparing_link(req) + with indent_log(): + # Check if the relevant file is already available + # in the download directory + file_path = None + if self.download_dir is not None and link.is_wheel: + hashes = self._get_linked_req_hashes(req) + file_path = _check_download_dir(req.link, self.download_dir, hashes) + + if file_path is not None: + # The file is already available, so mark it as downloaded + self._downloaded[req.link.url] = file_path + else: + # The file is not available, attempt to fetch only metadata + wheel_dist = self._fetch_metadata_using_lazy_wheel(link) + if wheel_dist is not None: + req.needs_more_preparation = True + return wheel_dist + + # None of the optimizations worked, fully prepare the requirement + return self._prepare_linked_requirement(req, parallel_builds) + + def prepare_linked_requirements_more( + self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False + ) -> None: + """Prepare linked requirements more, if needed.""" + reqs = [req for req in reqs if req.needs_more_preparation] + for req in reqs: + # Determine if any of these requirements were already downloaded. 
+ if self.download_dir is not None and req.link.is_wheel: + hashes = self._get_linked_req_hashes(req) + file_path = _check_download_dir(req.link, self.download_dir, hashes) + if file_path is not None: + self._downloaded[req.link.url] = file_path + req.needs_more_preparation = False + + # Prepare requirements we found were already downloaded for some + # reason. The other downloads will be completed separately. + partially_downloaded_reqs: List[InstallRequirement] = [] + for req in reqs: + if req.needs_more_preparation: + partially_downloaded_reqs.append(req) + else: + self._prepare_linked_requirement(req, parallel_builds) + + # TODO: separate this part out from RequirementPreparer when the v1 + # resolver can be removed! + self._complete_partial_requirements( + partially_downloaded_reqs, + parallel_builds=parallel_builds, + ) + + def _prepare_linked_requirement( + self, req: InstallRequirement, parallel_builds: bool + ) -> BaseDistribution: + assert req.link + link = req.link + + self._ensure_link_req_src_dir(req, parallel_builds) + hashes = self._get_linked_req_hashes(req) + + if link.is_existing_dir() and self.in_tree_build: + local_file = None + elif link.url not in self._downloaded: + try: + local_file = unpack_url( + link, req.source_dir, self._download, self.download_dir, hashes + ) + except NetworkConnectionError as exc: + raise InstallationError( + "Could not install requirement {} because of HTTP " + "error {} for URL {}".format(req, exc, link) + ) + else: + file_path = self._downloaded[link.url] + if hashes: + hashes.check_against_path(file_path) + local_file = File(file_path, content_type=None) + + # For use in later processing, + # preserve the file path on the requirement. + if local_file: + req.local_file_path = local_file.path + + dist = _get_prepared_distribution( + req, + self.req_tracker, + self.finder, + self.build_isolation, + ) + return dist + + def save_linked_requirement(self, req: InstallRequirement) -> None: + assert self.download_dir is not None + assert req.link is not None + link = req.link + if link.is_vcs or (link.is_existing_dir() and req.editable): + # Make a .zip of the source_dir we already created. + req.archive(self.download_dir) + return + + if link.is_existing_dir(): + logger.debug( + "Not copying link to destination directory " + "since it is a directory: %s", + link, + ) + return + if req.local_file_path is None: + # No distribution was downloaded for this requirement. 
+ return + + download_location = os.path.join(self.download_dir, link.filename) + if not os.path.exists(download_location): + shutil.copy(req.local_file_path, download_location) + download_path = display_path(download_location) + logger.info("Saved %s", download_path) + + def prepare_editable_requirement( + self, + req: InstallRequirement, + ) -> BaseDistribution: + """Prepare an editable requirement.""" + assert req.editable, "cannot prepare a non-editable req as editable" + + logger.info("Obtaining %s", req) + + with indent_log(): + if self.require_hashes: + raise InstallationError( + "The editable requirement {} cannot be installed when " + "requiring hashes, because there is no single file to " + "hash.".format(req) + ) + req.ensure_has_source_dir(self.src_dir) + req.update_editable() + + dist = _get_prepared_distribution( + req, + self.req_tracker, + self.finder, + self.build_isolation, + ) + + req.check_if_exists(self.use_user_site) + + return dist + + def prepare_installed_requirement( + self, + req: InstallRequirement, + skip_reason: str, + ) -> BaseDistribution: + """Prepare an already-installed requirement.""" + assert req.satisfied_by, "req should have been satisfied but isn't" + assert skip_reason is not None, ( + "did not get skip reason skipped but req.satisfied_by " + "is set to {}".format(req.satisfied_by) + ) + logger.info( + "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version + ) + with indent_log(): + if self.require_hashes: + logger.debug( + "Since it is already installed, we are trusting this " + "package without checking its hash. To ensure a " + "completely repeatable environment, install into an " + "empty virtualenv." + ) + return InstalledDistribution(req).get_metadata_distribution() diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/pyproject.py b/.venv/lib/python3.9/site-packages/pip/_internal/pyproject.py new file mode 100644 index 0000000..31534a3 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/pyproject.py @@ -0,0 +1,183 @@ +import os +from collections import namedtuple +from typing import Any, List, Optional + +from pip._vendor import tomli +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement + +from pip._internal.exceptions import InstallationError + + +def _is_list_of_str(obj: Any) -> bool: + return isinstance(obj, list) and all(isinstance(item, str) for item in obj) + + +def make_pyproject_path(unpacked_source_directory: str) -> str: + return os.path.join(unpacked_source_directory, "pyproject.toml") + + +BuildSystemDetails = namedtuple( + "BuildSystemDetails", ["requires", "backend", "check", "backend_path"] +) + + +def load_pyproject_toml( + use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str +) -> Optional[BuildSystemDetails]: + """Load the pyproject.toml file. + + Parameters: + use_pep517 - Has the user requested PEP 517 processing? None + means the user hasn't explicitly specified. + pyproject_toml - Location of the project's pyproject.toml file + setup_py - Location of the project's setup.py file + req_name - The name of the requirement we're processing (for + error reporting) + + Returns: + None if we should use the legacy code path, otherwise a tuple + ( + requirements from pyproject.toml, + name of PEP 517 backend, + requirements we should check are installed after setting + up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. 
+ ) + """ + has_pyproject = os.path.isfile(pyproject_toml) + has_setup = os.path.isfile(setup_py) + + if not has_pyproject and not has_setup: + raise InstallationError( + f"{req_name} does not appear to be a Python project: " + f"neither 'setup.py' nor 'pyproject.toml' found." + ) + + if has_pyproject: + with open(pyproject_toml, encoding="utf-8") as f: + pp_toml = tomli.load(f) + build_system = pp_toml.get("build-system") + else: + build_system = None + + # The following cases must use PEP 517 + # We check for use_pep517 being non-None and falsey because that means + # the user explicitly requested --no-use-pep517. The value 0 as + # opposed to False can occur when the value is provided via an + # environment variable or config file option (due to the quirk of + # strtobool() returning an integer in pip's configuration code). + if has_pyproject and not has_setup: + if use_pep517 is not None and not use_pep517: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project does not have a setup.py" + ) + use_pep517 = True + elif build_system and "build-backend" in build_system: + if use_pep517 is not None and not use_pep517: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project specifies a build backend of {} " + "in pyproject.toml".format(build_system["build-backend"]) + ) + use_pep517 = True + + # If we haven't worked out whether to use PEP 517 yet, + # and the user hasn't explicitly stated a preference, + # we do so if the project has a pyproject.toml file. + elif use_pep517 is None: + use_pep517 = has_pyproject + + # At this point, we know whether we're going to use PEP 517. + assert use_pep517 is not None + + # If we're using the legacy code path, there is nothing further + # for us to do here. + if not use_pep517: + return None + + if build_system is None: + # Either the user has a pyproject.toml with no build-system + # section, or the user has no pyproject.toml, but has opted in + # explicitly via --use-pep517. + # In the absence of any explicit backend specification, we + # assume the setuptools backend that most closely emulates the + # traditional direct setup.py execution, and require wheel and + # a version of setuptools that supports that backend. + + build_system = { + "requires": ["setuptools>=40.8.0", "wheel"], + "build-backend": "setuptools.build_meta:__legacy__", + } + + # If we're using PEP 517, we have build system information (either + # from pyproject.toml, or defaulted by the code above). + # Note that at this point, we do not know if the user has actually + # specified a backend, though. + assert build_system is not None + + # Ensure that the build-system section in pyproject.toml conforms + # to PEP 518. 
+ error_template = ( + "{package} has a pyproject.toml file that does not comply " + "with PEP 518: {reason}" + ) + + # Specifying the build-system table but not the requires key is invalid + if "requires" not in build_system: + raise InstallationError( + error_template.format( + package=req_name, + reason=( + "it has a 'build-system' table but not " + "'build-system.requires' which is mandatory in the table" + ), + ) + ) + + # Error out if requires is not a list of strings + requires = build_system["requires"] + if not _is_list_of_str(requires): + raise InstallationError( + error_template.format( + package=req_name, + reason="'build-system.requires' is not a list of strings.", + ) + ) + + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + Requirement(requirement) + except InvalidRequirement: + raise InstallationError( + error_template.format( + package=req_name, + reason=( + "'build-system.requires' contains an invalid " + "requirement: {!r}".format(requirement) + ), + ) + ) + + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) + check: List[str] = [] + if backend is None: + # If the user didn't specify a backend, we assume they want to use + # the setuptools backend. But we can't be sure they have included + # a version of setuptools which supplies the backend, or wheel + # (which is needed by the backend) in their requirements. So we + # make a note to check that those requirements are present once + # we have set up the environment. + # This is quite a lot of work to check for a very specific case. But + # the problem is, that case is potentially quite common - projects that + # adopted PEP 518 early for the ability to specify requirements to + # execute setup.py, but never considered needing to mention the build + # tools themselves. The original PEP 518 code had a similar check (but + # implemented in a different way). 
+ backend = "setuptools.build_meta:__legacy__" + check = ["setuptools>=40.8.0", "wheel"] + + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/req/__init__.py new file mode 100644 index 0000000..70dea27 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/req/__init__.py @@ -0,0 +1,94 @@ +import collections +import logging +from typing import Iterator, List, Optional, Sequence, Tuple + +from pip._internal.utils.logging import indent_log + +from .req_file import parse_requirements +from .req_install import InstallRequirement +from .req_set import RequirementSet + +__all__ = [ + "RequirementSet", + "InstallRequirement", + "parse_requirements", + "install_given_reqs", +] + +logger = logging.getLogger(__name__) + + +class InstallationResult: + def __init__(self, name: str) -> None: + self.name = name + + def __repr__(self) -> str: + return f"InstallationResult(name={self.name!r})" + + +def _validate_requirements( + requirements: List[InstallRequirement], +) -> Iterator[Tuple[str, InstallRequirement]]: + for req in requirements: + assert req.name, f"invalid to-be-installed requirement: {req}" + yield req.name, req + + +def install_given_reqs( + requirements: List[InstallRequirement], + install_options: List[str], + global_options: Sequence[str], + root: Optional[str], + home: Optional[str], + prefix: Optional[str], + warn_script_location: bool, + use_user_site: bool, + pycompile: bool, +) -> List[InstallationResult]: + """ + Install everything in the given list. + + (to be called after having downloaded and unpacked the packages) + """ + to_install = collections.OrderedDict(_validate_requirements(requirements)) + + if to_install: + logger.info( + "Installing collected packages: %s", + ", ".join(to_install.keys()), + ) + + installed = [] + + with indent_log(): + for req_name, requirement in to_install.items(): + if requirement.should_reinstall: + logger.info("Attempting uninstall: %s", req_name) + with indent_log(): + uninstalled_pathset = requirement.uninstall(auto_confirm=True) + else: + uninstalled_pathset = None + + try: + requirement.install( + install_options, + global_options, + root=root, + home=home, + prefix=prefix, + warn_script_location=warn_script_location, + use_user_site=use_user_site, + pycompile=pycompile, + ) + except Exception: + # if install did not succeed, rollback previous uninstall + if uninstalled_pathset and not requirement.install_succeeded: + uninstalled_pathset.rollback() + raise + else: + if uninstalled_pathset and requirement.install_succeeded: + uninstalled_pathset.commit() + + installed.append(InstallationResult(req_name)) + + return installed diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..2371d1d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc new file mode 100644 index 0000000..6712e52 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc new file mode 100644 index 0000000..4a29ac4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc new file mode 100644 index 0000000..777a074 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc new file mode 100644 index 0000000..3b1b0a5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc new file mode 100644 index 0000000..97ab9c5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc new file mode 100644 index 0000000..fc9d767 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/constructors.py b/.venv/lib/python3.9/site-packages/pip/_internal/req/constructors.py new file mode 100644 index 0000000..4a59403 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/req/constructors.py @@ -0,0 +1,466 @@ +"""Backing implementation for InstallRequirement's various constructors + +The idea here is that these formed a major chunk of InstallRequirement's size, +so moving them and the support code dedicated to them outside of that class +makes the rest of the code easier to understand. + +These are meant to be used elsewhere within pip to create instances of +InstallRequirement.
+""" + +import logging +import os +import re +from typing import Any, Dict, Optional, Set, Tuple, Union + +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement +from pip._vendor.packaging.specifiers import Specifier +from pip._vendor.pkg_resources import RequirementParseError, parse_requirements + +from pip._internal.exceptions import InstallationError +from pip._internal.models.index import PyPI, TestPyPI +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.req.req_file import ParsedRequirement +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import is_installable_dir +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import is_url, vcs + +__all__ = [ + "install_req_from_editable", + "install_req_from_line", + "parse_editable", +] + +logger = logging.getLogger(__name__) +operators = Specifier._operators.keys() + + +def _strip_extras(path: str) -> Tuple[str, Optional[str]]: + m = re.match(r"^(.+)(\[[^\]]+\])$", path) + extras = None + if m: + path_no_extras = m.group(1) + extras = m.group(2) + else: + path_no_extras = path + + return path_no_extras, extras + + +def convert_extras(extras: Optional[str]) -> Set[str]: + if not extras: + return set() + return get_requirement("placeholder" + extras.lower()).extras + + +def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]: + """Parses an editable requirement into: + - a requirement name + - an URL + - extras + - editable options + Accepted requirements: + svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir + .[some_extra] + """ + + url = editable_req + + # If a file path is specified with extras, strip off the extras. + url_no_extras, extras = _strip_extras(url) + + if os.path.isdir(url_no_extras): + # Treating it as code that has already been checked out + url_no_extras = path_to_url(url_no_extras) + + if url_no_extras.lower().startswith("file:"): + package_name = Link(url_no_extras).egg_fragment + if extras: + return ( + package_name, + url_no_extras, + get_requirement("placeholder" + extras.lower()).extras, + ) + else: + return package_name, url_no_extras, set() + + for version_control in vcs: + if url.lower().startswith(f"{version_control}:"): + url = f"{version_control}+{url}" + break + + link = Link(url) + + if not link.is_vcs: + backends = ", ".join(vcs.all_schemes) + raise InstallationError( + f"{editable_req} is not a valid editable requirement. " + f"It should either be a path to a local project or a VCS URL " + f"(beginning with {backends})." + ) + + package_name = link.egg_fragment + if not package_name: + raise InstallationError( + "Could not detect requirement name for '{}', please specify one " + "with #egg=your_package_name".format(editable_req) + ) + return package_name, url, set() + + +def deduce_helpful_msg(req: str) -> str: + """Returns helpful msg in case requirements file does not exist, + or cannot be parsed. + + :params req: Requirements file path + """ + msg = "" + if os.path.exists(req): + msg = " The path does exist. " + # Try to parse and check if it is a requirements file. + try: + with open(req) as fp: + # parse first line only + next(parse_requirements(fp.read())) + msg += ( + "The argument you provided " + "({}) appears to be a" + " requirements file. 
If that is the" + " case, use the '-r' flag to install" + " the packages specified within it." + ).format(req) + except RequirementParseError: + logger.debug("Cannot parse '%s' as requirements file", req, exc_info=True) + else: + msg += f" File '{req}' does not exist." + return msg + + +class RequirementParts: + def __init__( + self, + requirement: Optional[Requirement], + link: Optional[Link], + markers: Optional[Marker], + extras: Set[str], + ): + self.requirement = requirement + self.link = link + self.markers = markers + self.extras = extras + + +def parse_req_from_editable(editable_req: str) -> RequirementParts: + name, url, extras_override = parse_editable(editable_req) + + if name is not None: + try: + req: Optional[Requirement] = Requirement(name) + except InvalidRequirement: + raise InstallationError(f"Invalid requirement: '{name}'") + else: + req = None + + link = Link(url) + + return RequirementParts(req, link, None, extras_override) + + +# ---- The actual constructors follow ---- + + +def install_req_from_editable( + editable_req: str, + comes_from: Optional[Union[InstallRequirement, str]] = None, + use_pep517: Optional[bool] = None, + isolated: bool = False, + options: Optional[Dict[str, Any]] = None, + constraint: bool = False, + user_supplied: bool = False, + permit_editable_wheels: bool = False, +) -> InstallRequirement: + + parts = parse_req_from_editable(editable_req) + + return InstallRequirement( + parts.requirement, + comes_from=comes_from, + user_supplied=user_supplied, + editable=True, + permit_editable_wheels=permit_editable_wheels, + link=parts.link, + constraint=constraint, + use_pep517=use_pep517, + isolated=isolated, + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, + extras=parts.extras, + ) + + +def _looks_like_path(name: str) -> bool: + """Checks whether the string "looks like" a path on the filesystem. + + This does not check whether the target actually exists, only judge from the + appearance. + + Returns true if any of the following conditions is true: + * a path separator is found (either os.path.sep or os.path.altsep); + * a dot is found (which represents the current directory). + """ + if os.path.sep in name: + return True + if os.path.altsep is not None and os.path.altsep in name: + return True + if name.startswith("."): + return True + return False + + +def _get_url_from_path(path: str, name: str) -> Optional[str]: + """ + First, it checks whether a provided path is an installable directory. If it + is, returns the path. + + If false, check if the path is an archive file (such as a .whl). + The function checks if the path is a file. If false, if the path has + an @, it will treat it as a PEP 440 URL requirement and return the path. + """ + if _looks_like_path(name) and os.path.isdir(path): + if is_installable_dir(path): + return path_to_url(path) + # TODO: The is_installable_dir test here might not be necessary + # now that it is done in load_pyproject_toml too. + raise InstallationError( + f"Directory {name!r} is not installable. Neither 'setup.py' " + "nor 'pyproject.toml' found." 
+ ) + if not is_archive_file(path): + return None + if os.path.isfile(path): + return path_to_url(path) + urlreq_parts = name.split("@", 1) + if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): + # If the path contains '@' and the part before it does not look + # like a path, try to treat it as a PEP 440 URL req instead. + return None + logger.warning( + "Requirement %r looks like a filename, but the file does not exist", + name, + ) + return path_to_url(path) + + +def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts: + if is_url(name): + marker_sep = "; " + else: + marker_sep = ";" + if marker_sep in name: + name, markers_as_string = name.split(marker_sep, 1) + markers_as_string = markers_as_string.strip() + if not markers_as_string: + markers = None + else: + markers = Marker(markers_as_string) + else: + markers = None + name = name.strip() + req_as_string = None + path = os.path.normpath(os.path.abspath(name)) + link = None + extras_as_string = None + + if is_url(name): + link = Link(name) + else: + p, extras_as_string = _strip_extras(path) + url = _get_url_from_path(p, name) + if url is not None: + link = Link(url) + + # it's a local file, dir, or url + if link: + # Handle relative file URLs + if link.scheme == "file" and re.search(r"\.\./", link.url): + link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path)))) + # wheel file + if link.is_wheel: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + req_as_string = f"{wheel.name}=={wheel.version}" + else: + # set the req to the egg fragment. when it's not there, this + # will become an 'unnamed' requirement + req_as_string = link.egg_fragment + + # a requirement specifier + else: + req_as_string = name + + extras = convert_extras(extras_as_string) + + def with_source(text: str) -> str: + if not line_source: + return text + return f"{text} (from {line_source})" + + def _parse_req_string(req_as_string: str) -> Requirement: + try: + req = get_requirement(req_as_string) + except InvalidRequirement: + if os.path.sep in req_as_string: + add_msg = "It looks like a path." + add_msg += deduce_helpful_msg(req_as_string) + elif "=" in req_as_string and not any( + op in req_as_string for op in operators + ): + add_msg = "= is not a valid operator. Did you mean == ?" + else: + add_msg = "" + msg = with_source(f"Invalid requirement: {req_as_string!r}") + if add_msg: + msg += f"\nHint: {add_msg}" + raise InstallationError(msg) + else: + # Deprecate extras after specifiers: "name>=1.0[extras]" + # This currently works by accident because _strip_extras() parses + # any extras in the end of the string and those are saved in + # RequirementParts + for spec in req.specifier: + spec_str = str(spec) + if spec_str.endswith("]"): + msg = f"Extras after version '{spec_str}'." + raise InstallationError(msg) + return req + + if req_as_string is not None: + req: Optional[Requirement] = _parse_req_string(req_as_string) + else: + req = None + + return RequirementParts(req, link, markers, extras) + + +def install_req_from_line( + name: str, + comes_from: Optional[Union[str, InstallRequirement]] = None, + use_pep517: Optional[bool] = None, + isolated: bool = False, + options: Optional[Dict[str, Any]] = None, + constraint: bool = False, + line_source: Optional[str] = None, + user_supplied: bool = False, +) -> InstallRequirement: + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. 
+ + :param line_source: An optional string describing where the line is from, + for logging purposes in case of an error. + """ + parts = parse_req_from_line(name, line_source) + + return InstallRequirement( + parts.requirement, + comes_from, + link=parts.link, + markers=parts.markers, + use_pep517=use_pep517, + isolated=isolated, + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, + constraint=constraint, + extras=parts.extras, + user_supplied=user_supplied, + ) + + +def install_req_from_req_string( + req_string: str, + comes_from: Optional[InstallRequirement] = None, + isolated: bool = False, + use_pep517: Optional[bool] = None, + user_supplied: bool = False, +) -> InstallRequirement: + try: + req = get_requirement(req_string) + except InvalidRequirement: + raise InstallationError(f"Invalid requirement: '{req_string}'") + + domains_not_allowed = [ + PyPI.file_storage_domain, + TestPyPI.file_storage_domain, + ] + if ( + req.url + and comes_from + and comes_from.link + and comes_from.link.netloc in domains_not_allowed + ): + # Explicitly disallow pypi packages that depend on external urls + raise InstallationError( + "Packages installed from PyPI cannot depend on packages " + "which are not also hosted on PyPI.\n" + "{} depends on {} ".format(comes_from.name, req) + ) + + return InstallRequirement( + req, + comes_from, + isolated=isolated, + use_pep517=use_pep517, + user_supplied=user_supplied, + ) + + +def install_req_from_parsed_requirement( + parsed_req: ParsedRequirement, + isolated: bool = False, + use_pep517: Optional[bool] = None, + user_supplied: bool = False, +) -> InstallRequirement: + if parsed_req.is_editable: + req = install_req_from_editable( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + constraint=parsed_req.constraint, + isolated=isolated, + user_supplied=user_supplied, + ) + + else: + req = install_req_from_line( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + isolated=isolated, + options=parsed_req.options, + constraint=parsed_req.constraint, + line_source=parsed_req.line_source, + user_supplied=user_supplied, + ) + return req + + +def install_req_from_link_and_ireq( + link: Link, ireq: InstallRequirement +) -> InstallRequirement: + return InstallRequirement( + req=ireq.req, + comes_from=ireq.comes_from, + editable=ireq.editable, + link=link, + markers=ireq.markers, + use_pep517=ireq.use_pep517, + isolated=ireq.isolated, + install_options=ireq.install_options, + global_options=ireq.global_options, + hash_options=ireq.hash_options, + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/req_file.py b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_file.py new file mode 100644 index 0000000..03ae504 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_file.py @@ -0,0 +1,536 @@ +""" +Requirements file parsing +""" + +import optparse +import os +import re +import shlex +import urllib.parse +from optparse import Values +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + Iterator, + List, + Optional, + Tuple, +) + +from pip._internal.cli import cmdoptions +from pip._internal.exceptions import InstallationError, RequirementsFileParseError +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.session import PipSession +from 
pip._internal.network.utils import raise_for_status +from pip._internal.utils.encoding import auto_decode +from pip._internal.utils.urls import get_url_scheme + +if TYPE_CHECKING: + # NoReturn introduced in 3.6.2; imported only for type checking to maintain + # pip compatibility with older patch versions of Python 3.6 + from typing import NoReturn + + from pip._internal.index.package_finder import PackageFinder + +__all__ = ["parse_requirements"] + +ReqFileLines = Iterable[Tuple[int, str]] + +LineParser = Callable[[str], Tuple[str, Values]] + +SCHEME_RE = re.compile(r"^(http|https|file):", re.I) +COMMENT_RE = re.compile(r"(^|\s+)#.*$") + +# Matches environment variable-style values in '${MY_VARIABLE_1}' with the +# variable name consisting of only uppercase letters, digits or the '_' +# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, +# 2013 Edition. +ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})") + +SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [ + cmdoptions.index_url, + cmdoptions.extra_index_url, + cmdoptions.no_index, + cmdoptions.constraints, + cmdoptions.requirements, + cmdoptions.editable, + cmdoptions.find_links, + cmdoptions.no_binary, + cmdoptions.only_binary, + cmdoptions.prefer_binary, + cmdoptions.require_hashes, + cmdoptions.pre, + cmdoptions.trusted_host, + cmdoptions.use_new_feature, +] + +# options to be passed to requirements +SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [ + cmdoptions.install_options, + cmdoptions.global_options, + cmdoptions.hash, +] + +# the 'dest' string values +SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] + + +class ParsedRequirement: + def __init__( + self, + requirement: str, + is_editable: bool, + comes_from: str, + constraint: bool, + options: Optional[Dict[str, Any]] = None, + line_source: Optional[str] = None, + ) -> None: + self.requirement = requirement + self.is_editable = is_editable + self.comes_from = comes_from + self.options = options + self.constraint = constraint + self.line_source = line_source + + +class ParsedLine: + def __init__( + self, + filename: str, + lineno: int, + args: str, + opts: Values, + constraint: bool, + ) -> None: + self.filename = filename + self.lineno = lineno + self.opts = opts + self.constraint = constraint + + if args: + self.is_requirement = True + self.is_editable = False + self.requirement = args + elif opts.editables: + self.is_requirement = True + self.is_editable = True + # We don't support multiple -e on one line + self.requirement = opts.editables[0] + else: + self.is_requirement = False + + +def parse_requirements( + filename: str, + session: PipSession, + finder: Optional["PackageFinder"] = None, + options: Optional[optparse.Values] = None, + constraint: bool = False, +) -> Iterator[ParsedRequirement]: + """Parse a requirements file and yield ParsedRequirement instances. + + :param filename: Path or url of requirements file. + :param session: PipSession instance. + :param finder: Instance of pip.index.PackageFinder. + :param options: cli options. + :param constraint: If true, parsing a constraint file rather than + requirements file.
+ """ + line_parser = get_line_parser(finder) + parser = RequirementsFileParser(session, line_parser) + + for parsed_line in parser.parse(filename, constraint): + parsed_req = handle_line( + parsed_line, options=options, finder=finder, session=session + ) + if parsed_req is not None: + yield parsed_req + + +def preprocess(content: str) -> ReqFileLines: + """Split, filter, and join lines, and return a line iterator + + :param content: the content of the requirements file + """ + lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1) + lines_enum = join_lines(lines_enum) + lines_enum = ignore_comments(lines_enum) + lines_enum = expand_env_variables(lines_enum) + return lines_enum + + +def handle_requirement_line( + line: ParsedLine, + options: Optional[optparse.Values] = None, +) -> ParsedRequirement: + + # preserve for the nested code path + line_comes_from = "{} {} (line {})".format( + "-c" if line.constraint else "-r", + line.filename, + line.lineno, + ) + + assert line.is_requirement + + if line.is_editable: + # For editable requirements, we don't support per-requirement + # options, so just return the parsed requirement. + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + ) + else: + if options: + # Disable wheels if the user has specified build options + cmdoptions.check_install_build_global(options, line.opts) + + # get the options that apply to requirements + req_options = {} + for dest in SUPPORTED_OPTIONS_REQ_DEST: + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = f"line {line.lineno} of {line.filename}" + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, + ) + + +def handle_option_line( + opts: Values, + filename: str, + lineno: int, + finder: Optional["PackageFinder"] = None, + options: Optional[optparse.Values] = None, + session: Optional[PipSession] = None, +) -> None: + + if options: + # percolate options upward + if opts.require_hashes: + options.require_hashes = opts.require_hashes + if opts.features_enabled: + options.features_enabled.extend( + f for f in opts.features_enabled if f not in options.features_enabled + ) + + # set finder options + if finder: + find_links = finder.find_links + index_urls = finder.index_urls + if opts.index_url: + index_urls = [opts.index_url] + if opts.no_index is True: + index_urls = [] + if opts.extra_index_urls: + index_urls.extend(opts.extra_index_urls) + if opts.find_links: + # FIXME: it would be nice to keep track of the source + # of the find_links: support a find-links local path + # relative to a requirements file. 
+ value = opts.find_links[0] + req_dir = os.path.dirname(os.path.abspath(filename)) + relative_to_reqs_file = os.path.join(req_dir, value) + if os.path.exists(relative_to_reqs_file): + value = relative_to_reqs_file + find_links.append(value) + + if session: + # We need to update the auth urls in session + session.update_index_urls(index_urls) + + search_scope = SearchScope( + find_links=find_links, + index_urls=index_urls, + ) + finder.search_scope = search_scope + + if opts.pre: + finder.set_allow_all_prereleases() + + if opts.prefer_binary: + finder.set_prefer_binary() + + if session: + for host in opts.trusted_hosts or []: + source = f"line {lineno} of {filename}" + session.add_trusted_host(host, source=source) + + +def handle_line( + line: ParsedLine, + options: Optional[optparse.Values] = None, + finder: Optional["PackageFinder"] = None, + session: Optional[PipSession] = None, +) -> Optional[ParsedRequirement]: + """Handle a single parsed requirements line; this can result in + creating/yielding requirements, or updating the finder. + + :param line: The parsed line to be processed. + :param options: CLI options. + :param finder: The finder - updated by non-requirement lines. + :param session: The session - updated by non-requirement lines. + + Returns a ParsedRequirement object if the line is a requirement line, + otherwise returns None. + + For lines that contain requirements, the only options that have an effect + are from SUPPORTED_OPTIONS_REQ, and they are scoped to the + requirement. Other options from SUPPORTED_OPTIONS may be present, but are + ignored. + + For lines that do not contain requirements, the only options that have an + effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may + be present, but are ignored. These lines may contain multiple options + (although our docs imply only one is supported), and all are parsed and + affect the finder.
+ """ + + if line.is_requirement: + parsed_req = handle_requirement_line(line, options) + return parsed_req + else: + handle_option_line( + line.opts, + line.filename, + line.lineno, + finder, + options, + session, + ) + return None + + +class RequirementsFileParser: + def __init__( + self, + session: PipSession, + line_parser: LineParser, + ) -> None: + self._session = session + self._line_parser = line_parser + + def parse(self, filename: str, constraint: bool) -> Iterator[ParsedLine]: + """Parse a given file, yielding parsed lines.""" + yield from self._parse_and_recurse(filename, constraint) + + def _parse_and_recurse( + self, filename: str, constraint: bool + ) -> Iterator[ParsedLine]: + for line in self._parse_file(filename, constraint): + if not line.is_requirement and ( + line.opts.requirements or line.opts.constraints + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib.parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join( + os.path.dirname(filename), + req_path, + ) + + yield from self._parse_and_recurse(req_path, nested_constraint) + else: + yield line + + def _parse_file(self, filename: str, constraint: bool) -> Iterator[ParsedLine]: + _, content = get_file_content(filename, self._session) + + lines_enum = preprocess(content) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = f"Invalid requirement: {line}\n{e.msg}" + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser: + def parse_line(line: str) -> Tuple[str, Values]: + # Build new parser for each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + + opts, _ = parser.parse_args(shlex.split(options_str), defaults) + + return args_str, opts + + return parse_line + + +def break_args_options(line: str) -> Tuple[str, str]: + """Break up the line into an args and options string. We only want to shlex + (and then optparse) the options, not the args. args can contain markers + which are corrupted by shlex. + """ + tokens = line.split(" ") + args = [] + options = tokens[:] + for token in tokens: + if token.startswith("-") or token.startswith("--"): + break + else: + args.append(token) + options.pop(0) + return " ".join(args), " ".join(options) + + +class OptionParsingError(Exception): + def __init__(self, msg: str) -> None: + self.msg = msg + + +def build_parser() -> optparse.OptionParser: + """ + Return a parser for parsing requirement lines + """ + parser = optparse.OptionParser(add_help_option=False) + + option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ + for option_factory in option_factories: + option = option_factory() + parser.add_option(option) + + # By default optparse sys.exits on parsing errors. We want to wrap + # that in our own exception. 
+ def parser_exit(self: Any, msg: str) -> "NoReturn": + raise OptionParsingError(msg) + + # NOTE: mypy disallows assigning to a method + # https://github.com/python/mypy/issues/2427 + parser.exit = parser_exit # type: ignore + + return parser + + +def join_lines(lines_enum: ReqFileLines) -> ReqFileLines: + """Joins a line ending in '\' with the previous line (except when following + comments). The joined line takes on the index of the first line. + """ + primary_line_number = None + new_line: List[str] = [] + for line_number, line in lines_enum: + if not line.endswith("\\") or COMMENT_RE.match(line): + if COMMENT_RE.match(line): + # this ensures comments are always matched later + line = " " + line + if new_line: + new_line.append(line) + assert primary_line_number is not None + yield primary_line_number, "".join(new_line) + new_line = [] + else: + yield line_number, line + else: + if not new_line: + primary_line_number = line_number + new_line.append(line.strip("\\")) + + # last line contains \ + if new_line: + assert primary_line_number is not None + yield primary_line_number, "".join(new_line) + + # TODO: handle space after '\'. + + +def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines: + """ + Strips comments and filter empty lines. + """ + for line_number, line in lines_enum: + line = COMMENT_RE.sub("", line) + line = line.strip() + if line: + yield line_number, line + + +def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines: + """Replace all environment variables that can be retrieved via `os.getenv`. + + The only allowed format for environment variables defined in the + requirement file is `${MY_VARIABLE_1}` to ensure two things: + + 1. Strings that contain a `$` aren't accidentally (partially) expanded. + 2. Ensure consistency across platforms for requirement files. + + These points are the result of a discussion on the `github pull + request #3514 `_. + + Valid characters in variable names follow the `POSIX standard + `_ and are limited + to uppercase letter, digits and the `_` (underscore). + """ + for line_number, line in lines_enum: + for env_var, var_name in ENV_VAR_RE.findall(line): + value = os.getenv(var_name) + if not value: + continue + + line = line.replace(env_var, value) + + yield line_number, line + + +def get_file_content(url: str, session: PipSession) -> Tuple[str, str]: + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). Content is unicode. + Respects # -*- coding: declarations on the retrieved files. + + :param url: File path or url. + :param session: PipSession instance. + """ + scheme = get_url_scheme(url) + + # Pip has special support for file:// URLs (LocalFSAdapter). + if scheme in ["http", "https", "file"]: + resp = session.get(url) + raise_for_status(resp) + return resp.url, resp.text + + # Assume this is a bare path. + try: + with open(url, "rb") as f: + content = auto_decode(f.read()) + except OSError as exc: + raise InstallationError(f"Could not open requirements file: {exc}") + return url, content diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/req_install.py b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_install.py new file mode 100644 index 0000000..95dacab --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_install.py @@ -0,0 +1,891 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +import functools +import logging +import os +import shutil +import sys +import uuid +import zipfile +from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union + +from pip._vendor import pkg_resources +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.pep517.wrappers import Pep517HookCaller +from pip._vendor.pkg_resources import Distribution + +from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_scheme +from pip._internal.models.link import Link +from pip._internal.operations.build.metadata import generate_metadata +from pip._internal.operations.build.metadata_editable import generate_editable_metadata +from pip._internal.operations.build.metadata_legacy import ( + generate_metadata as generate_metadata_legacy, +) +from pip._internal.operations.install.editable_legacy import ( + install_editable as install_editable_legacy, +) +from pip._internal.operations.install.legacy import LegacyInstallFailure +from pip._internal.operations.install.legacy import install as install_legacy +from pip._internal.operations.install.wheel import install_wheel +from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path +from pip._internal.req.req_uninstall import UninstallPathSet +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.direct_url_helpers import ( + direct_url_for_editable, + direct_url_from_link, +) +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + dist_in_site_packages, + dist_in_usersite, + get_distribution, + hide_url, + redact_auth_from_url, +) +from pip._internal.utils.packaging import get_metadata +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.virtualenv import running_under_virtualenv +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +def _get_dist(metadata_directory: str) -> Distribution: + """Return a pkg_resources.Distribution for the provided + metadata directory. + """ + dist_dir = metadata_directory.rstrip(os.sep) + + # Build a PathMetadata object, from path to metadata. :wink: + base_dir, dist_dir_name = os.path.split(dist_dir) + metadata = pkg_resources.PathMetadata(base_dir, dist_dir) + + # Determine the correct Distribution object type. + if dist_dir.endswith(".egg-info"): + dist_cls = pkg_resources.Distribution + dist_name = os.path.splitext(dist_dir_name)[0] + else: + assert dist_dir.endswith(".dist-info") + dist_cls = pkg_resources.DistInfoDistribution + dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0] + + return dist_cls( + base_dir, + project_name=dist_name, + metadata=metadata, + ) + + +class InstallRequirement: + """ + Represents something that may be installed later on, may have information + about where to fetch the relevant requirement and also contains logic for + installing the said requirement. 
+ """ + + def __init__( + self, + req: Optional[Requirement], + comes_from: Optional[Union[str, "InstallRequirement"]], + editable: bool = False, + link: Optional[Link] = None, + markers: Optional[Marker] = None, + use_pep517: Optional[bool] = None, + isolated: bool = False, + install_options: Optional[List[str]] = None, + global_options: Optional[List[str]] = None, + hash_options: Optional[Dict[str, List[str]]] = None, + constraint: bool = False, + extras: Collection[str] = (), + user_supplied: bool = False, + permit_editable_wheels: bool = False, + ) -> None: + assert req is None or isinstance(req, Requirement), req + self.req = req + self.comes_from = comes_from + self.constraint = constraint + self.editable = editable + self.permit_editable_wheels = permit_editable_wheels + self.legacy_install_reason: Optional[int] = None + + # source_dir is the local directory where the linked requirement is + # located, or unpacked. In case unpacking is needed, creating and + # populating source_dir is done by the RequirementPreparer. Note this + # is not necessarily the directory where pyproject.toml or setup.py is + # located - that one is obtained via unpacked_source_directory. + self.source_dir: Optional[str] = None + if self.editable: + assert link + if link.is_file: + self.source_dir = os.path.normpath(os.path.abspath(link.file_path)) + + if link is None and req and req.url: + # PEP 508 URL requirement + link = Link(req.url) + self.link = self.original_link = link + self.original_link_is_in_wheel_cache = False + + # Path to any downloaded or already-existing package. + self.local_file_path: Optional[str] = None + if self.link and self.link.is_file: + self.local_file_path = self.link.file_path + + if extras: + self.extras = extras + elif req: + self.extras = {pkg_resources.safe_extra(extra) for extra in req.extras} + else: + self.extras = set() + if markers is None and req: + markers = req.marker + self.markers = markers + + # This holds the pkg_resources.Distribution object if this requirement + # is already available: + self.satisfied_by: Optional[Distribution] = None + # Whether the installation process should try to uninstall an existing + # distribution before installing this requirement. + self.should_reinstall = False + # Temporary build location + self._temp_build_dir: Optional[TempDirectory] = None + # Set to True after successful installation + self.install_succeeded: Optional[bool] = None + # Supplied options + self.install_options = install_options if install_options else [] + self.global_options = global_options if global_options else [] + self.hash_options = hash_options if hash_options else {} + # Set to True after successful preparation of this requirement + self.prepared = False + # User supplied requirement are explicitly requested for installation + # by the user via CLI arguments or requirements files, as opposed to, + # e.g. dependencies, extras or constraints. + self.user_supplied = user_supplied + + self.isolated = isolated + self.build_env: BuildEnvironment = NoOpBuildEnvironment() + + # For PEP 517, the directory where we request the project metadata + # gets stored. We need this to pass to build_wheel, so the backend + # can ensure that the wheel matches the metadata (see the PEP for + # details). 
+ self.metadata_directory: Optional[str] = None + + # The static build requirements (from pyproject.toml) + self.pyproject_requires: Optional[List[str]] = None + + # Build requirements that we will check are available + self.requirements_to_check: List[str] = [] + + # The PEP 517 backend we should use to build the project + self.pep517_backend: Optional[Pep517HookCaller] = None + + # Are we using PEP 517 for this requirement? + # After pyproject.toml has been loaded, the only valid values are True + # and False. Before loading, None is valid (meaning "use the default"). + # Setting an explicit value before loading pyproject.toml is supported, + # but after loading this flag should be treated as read only. + self.use_pep517 = use_pep517 + + # This requirement needs more preparation before it can be built + self.needs_more_preparation = False + + def __str__(self) -> str: + if self.req: + s = str(self.req) + if self.link: + s += " from {}".format(redact_auth_from_url(self.link.url)) + elif self.link: + s = redact_auth_from_url(self.link.url) + else: + s = "" + if self.satisfied_by is not None: + s += " in {}".format(display_path(self.satisfied_by.location)) + if self.comes_from: + if isinstance(self.comes_from, str): + comes_from: Optional[str] = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += f" (from {comes_from})" + return s + + def __repr__(self) -> str: + return "<{} object: {} editable={!r}>".format( + self.__class__.__name__, str(self), self.editable + ) + + def format_debug(self) -> str: + """An un-tested helper for getting state, for debugging.""" + attributes = vars(self) + names = sorted(attributes) + + state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)) + return "<{name} object: {{{state}}}>".format( + name=self.__class__.__name__, + state=", ".join(state), + ) + + # Things that are valid for all kinds of requirements? + @property + def name(self) -> Optional[str]: + if self.req is None: + return None + return pkg_resources.safe_name(self.req.name) + + @functools.lru_cache() # use cached_property in python 3.8+ + def supports_pyproject_editable(self) -> bool: + if not self.use_pep517: + return False + assert self.pep517_backend + with self.build_env: + runner = runner_with_spinner_message( + "Checking if build backend supports build_editable" + ) + with self.pep517_backend.subprocess_runner(runner): + return "build_editable" in self.pep517_backend._supported_features() + + @property + def specifier(self) -> SpecifierSet: + return self.req.specifier + + @property + def is_pinned(self) -> bool: + """Return whether I am pinned to an exact version. + + For example, some-package==1.2 is pinned; some-package>1.2 is not. + """ + specifiers = self.specifier + return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} + + def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ("",) + if self.markers is not None: + return any( + self.markers.evaluate({"extra": extra}) for extra in extras_requested + ) + else: + return True + + @property + def has_hash_options(self) -> bool: + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. 
+ + """ + return bool(self.hash_options) + + def hashes(self, trust_internet: bool = True) -> Hashes: + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.hash_options.copy() + link = self.link if trust_internet else self.original_link + if link and link.hash: + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + + def from_path(self) -> Optional[str]: + """Format a nice indicator to show where this "comes from" """ + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + if isinstance(self.comes_from, str): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += "->" + comes_from + return s + + def ensure_build_location( + self, build_dir: str, autodelete: bool, parallel_builds: bool + ) -> str: + assert build_dir is not None + if self._temp_build_dir is not None: + assert self._temp_build_dir.path + return self._temp_build_dir.path + if self.req is None: + # Some systems have /tmp as a symlink which confuses custom + # builds (such as numpy). Thus, we ensure that the real path + # is returned. + self._temp_build_dir = TempDirectory( + kind=tempdir_kinds.REQ_BUILD, globally_managed=True + ) + + return self._temp_build_dir.path + + # This is the only remaining place where we manually determine the path + # for the temporary directory. It is only needed for editables where + # it is the value of the --src option. + + # When parallel builds are enabled, add a UUID to the build directory + # name so multiple builds do not interfere with each other. + dir_name: str = canonicalize_name(self.name) + if parallel_builds: + dir_name = f"{dir_name}_{uuid.uuid4().hex}" + + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug("Creating directory %s", build_dir) + os.makedirs(build_dir) + actual_build_dir = os.path.join(build_dir, dir_name) + # `None` indicates that we respect the globally-configured deletion + # settings, which is what we actually want when auto-deleting. + delete_arg = None if autodelete else False + return TempDirectory( + path=actual_build_dir, + delete=delete_arg, + kind=tempdir_kinds.REQ_BUILD, + globally_managed=True, + ).path + + def _set_requirement(self) -> None: + """Set requirement after generating metadata.""" + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None + + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = Requirement( + "".join( + [ + self.metadata["Name"], + op, + self.metadata["Version"], + ] + ) + ) + + def warn_on_mismatching_name(self) -> None: + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. 
+ return + + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + "Generating metadata for package %s " + "produced metadata for project name %s. Fix your " + "#egg=%s fragments.", + self.name, + metadata_name, + self.name, + ) + self.req = Requirement(metadata_name) + + def check_if_exists(self, use_user_site: bool) -> None: + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.should_reinstall appropriately. + """ + if self.req is None: + return + existing_dist = get_distribution(self.req.name) + if not existing_dist: + return + + # pkg_resouces may contain a different copy of packaging.version from + # pip in if the downstream distributor does a poor job debundling pip. + # We avoid existing_dist.parsed_version and let SpecifierSet.contains + # parses the version instead. + existing_version = existing_dist.version + version_compatible = ( + existing_version is not None + and self.req.specifier.contains(existing_version, prereleases=True) + ) + if not version_compatible: + self.satisfied_by = None + if use_user_site: + if dist_in_usersite(existing_dist): + self.should_reinstall = True + elif running_under_virtualenv() and dist_in_site_packages( + existing_dist + ): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to {} in {}".format( + existing_dist.project_name, existing_dist.location + ) + ) + else: + self.should_reinstall = True + else: + if self.editable: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + else: + self.satisfied_by = existing_dist + + # Things valid for wheels + @property + def is_wheel(self) -> bool: + if not self.link: + return False + return self.link.is_wheel + + # Things valid for sdists + @property + def unpacked_source_directory(self) -> str: + return os.path.join( + self.source_dir, self.link and self.link.subdirectory_fragment or "" + ) + + @property + def setup_py_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + setup_py = os.path.join(self.unpacked_source_directory, "setup.py") + + return setup_py + + @property + def setup_cfg_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg") + + return setup_cfg + + @property + def pyproject_toml_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + return make_pyproject_path(self.unpacked_source_directory) + + def load_pyproject_toml(self) -> None: + """Load the pyproject.toml file. + + After calling this routine, all of the attributes related to PEP 517 + processing for this requirement have been set. In particular, the + use_pep517 attribute can be used to determine whether we should + follow the PEP 517 or legacy (setup.py) code path. + """ + pyproject_toml_data = load_pyproject_toml( + self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self) + ) + + if pyproject_toml_data is None: + self.use_pep517 = False + return + + self.use_pep517 = True + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires + self.pep517_backend = Pep517HookCaller( + self.unpacked_source_directory, + backend, + backend_path=backend_path, + ) + + def isolated_editable_sanity_check(self) -> None: + """Check that an editable requirement if valid for use with PEP 517/518. 
+ + This verifies that an editable that has a pyproject.toml either supports PEP 660 + or as a setup.py or a setup.cfg + """ + if ( + self.editable + and self.use_pep517 + and not self.supports_pyproject_editable() + and not os.path.isfile(self.setup_py_path) + and not os.path.isfile(self.setup_cfg_path) + ): + raise InstallationError( + f"Project {self} has a 'pyproject.toml' and its build " + f"backend is missing the 'build_editable' hook. Since it does not " + f"have a 'setup.py' nor a 'setup.cfg', " + f"it cannot be installed in editable mode. " + f"Consider using a build backend that supports PEP 660." + ) + + def prepare_metadata(self) -> None: + """Ensure that project metadata is available. + + Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. + Under legacy processing, call setup.py egg-info. + """ + assert self.source_dir + + if self.use_pep517: + assert self.pep517_backend is not None + if ( + self.editable + and self.permit_editable_wheels + and self.supports_pyproject_editable() + ): + self.metadata_directory = generate_editable_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + ) + else: + self.metadata_directory = generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + ) + else: + self.metadata_directory = generate_metadata_legacy( + build_env=self.build_env, + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + isolated=self.isolated, + details=self.name or f"from {self.link}", + ) + + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() + else: + self.warn_on_mismatching_name() + + self.assert_source_matches_version() + + @property + def metadata(self) -> Any: + if not hasattr(self, "_metadata"): + self._metadata = get_metadata(self.get_dist()) + + return self._metadata + + def get_dist(self) -> Distribution: + return _get_dist(self.metadata_directory) + + def assert_source_matches_version(self) -> None: + assert self.source_dir + version = self.metadata["version"] + if self.req.specifier and version not in self.req.specifier: + logger.warning( + "Requested %s, but installing version %s", + self, + version, + ) + else: + logger.debug( + "Source in %s has version %s, which satisfies requirement %s", + display_path(self.source_dir), + version, + self, + ) + + # For both source distributions and editables + def ensure_has_source_dir( + self, + parent_dir: str, + autodelete: bool = False, + parallel_builds: bool = False, + ) -> None: + """Ensure that a source_dir is set. + + This will create a temporary build dir if the name of the requirement + isn't known yet. + + :param parent_dir: The ideal pip parent_dir for the source_dir. + Generally src_dir for editables and build_dir for sdists. + :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.ensure_build_location( + parent_dir, + autodelete=autodelete, + parallel_builds=parallel_builds, + ) + + # For editable installations + def update_editable(self) -> None: + if not self.link: + logger.debug( + "Cannot update repository at %s; repository location is unknown", + self.source_dir, + ) + return + assert self.editable + assert self.source_dir + if self.link.scheme == "file": + # Static paths don't get updated + return + vcs_backend = vcs.get_backend_for_scheme(self.link.scheme) + # Editable requirements are validated in Requirement constructors. + # So here, if it's neither a path nor a valid VCS URL, it's a bug. 
+ assert vcs_backend, f"Unsupported VCS URL {self.link.url}" + hidden_url = hide_url(self.link.url) + vcs_backend.obtain(self.source_dir, url=hidden_url) + + # Top-level Actions + def uninstall( + self, auto_confirm: bool = False, verbose: bool = False + ) -> Optional[UninstallPathSet]: + """ + Uninstall the distribution currently satisfying this requirement. + + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. + + """ + assert self.req + dist = get_distribution(self.req.name) + if not dist: + logger.warning("Skipping %s as it is not installed.", self.name) + return None + logger.info("Found existing installation: %s", dist) + + uninstalled_pathset = UninstallPathSet.from_dist(dist) + uninstalled_pathset.remove(auto_confirm, verbose) + return uninstalled_pathset + + def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str: + def _clean_zip_name(name: str, prefix: str) -> str: + assert name.startswith( + prefix + os.path.sep + ), f"name {name!r} doesn't start with prefix {prefix!r}" + name = name[len(prefix) + 1 :] + name = name.replace(os.path.sep, "/") + return name + + path = os.path.join(parentdir, path) + name = _clean_zip_name(path, rootdir) + return self.name + "/" + name + + def archive(self, build_dir: Optional[str]) -> None: + """Saves archive to provided build_dir. + + Used for saving downloaded VCS requirements as part of `pip download`. + """ + assert self.source_dir + if build_dir is None: + return + + create_archive = True + archive_name = "{}-{}.zip".format(self.name, self.metadata["version"]) + archive_path = os.path.join(build_dir, archive_name) + + if os.path.exists(archive_path): + response = ask_path_exists( + "The file {} exists. 
(i)gnore, (w)ipe, " + "(b)ackup, (a)bort ".format(display_path(archive_path)), + ("i", "w", "b", "a"), + ) + if response == "i": + create_archive = False + elif response == "w": + logger.warning("Deleting %s", display_path(archive_path)) + os.remove(archive_path) + elif response == "b": + dest_file = backup_dir(archive_path) + logger.warning( + "Backing up %s to %s", + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + elif response == "a": + sys.exit(-1) + + if not create_archive: + return + + zip_output = zipfile.ZipFile( + archive_path, + "w", + zipfile.ZIP_DEFLATED, + allowZip64=True, + ) + with zip_output: + dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory)) + for dirpath, dirnames, filenames in os.walk(dir): + for dirname in dirnames: + dir_arcname = self._get_archive_name( + dirname, + parentdir=dirpath, + rootdir=dir, + ) + zipdir = zipfile.ZipInfo(dir_arcname + "/") + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip_output.writestr(zipdir, "") + for filename in filenames: + file_arcname = self._get_archive_name( + filename, + parentdir=dirpath, + rootdir=dir, + ) + filename = os.path.join(dirpath, filename) + zip_output.write(filename, file_arcname) + + logger.info("Saved %s", display_path(archive_path)) + + def install( + self, + install_options: List[str], + global_options: Optional[Sequence[str]] = None, + root: Optional[str] = None, + home: Optional[str] = None, + prefix: Optional[str] = None, + warn_script_location: bool = True, + use_user_site: bool = False, + pycompile: bool = True, + ) -> None: + scheme = get_scheme( + self.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + + global_options = global_options if global_options is not None else [] + if self.editable and not self.is_wheel: + install_editable_legacy( + install_options, + global_options, + prefix=prefix, + home=home, + use_user_site=use_user_site, + name=self.name, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + ) + self.install_succeeded = True + return + + if self.is_wheel: + assert self.local_file_path + direct_url = None + if self.editable: + direct_url = direct_url_for_editable(self.unpacked_source_directory) + elif self.original_link: + direct_url = direct_url_from_link( + self.original_link, + self.source_dir, + self.original_link_is_in_wheel_cache, + ) + install_wheel( + self.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=self.user_supplied, + ) + self.install_succeeded = True + return + + # TODO: Why don't we do this for editable installs? + + # Extend the list of global and install options passed on to + # the setup.py call with the ones from the requirements file. + # Options specified in requirements file override those + # specified on the command line, since the last option given + # to setup.py is the one that is used. 
+ global_options = list(global_options) + self.global_options + install_options = list(install_options) + self.install_options + + try: + success = install_legacy( + install_options=install_options, + global_options=global_options, + root=root, + home=home, + prefix=prefix, + use_user_site=use_user_site, + pycompile=pycompile, + scheme=scheme, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + req_name=self.name, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + req_description=str(self.req), + ) + except LegacyInstallFailure as exc: + self.install_succeeded = False + raise exc.__cause__ + except Exception: + self.install_succeeded = True + raise + + self.install_succeeded = success + + if success and self.legacy_install_reason == 8368: + deprecated( + reason=( + "{} was installed using the legacy 'setup.py install' " + "method, because a wheel could not be built for it.".format( + self.name + ) + ), + replacement="to fix the wheel build issue reported above", + gone_in=None, + issue=8368, + ) + + +def check_invalid_constraint_type(req: InstallRequirement) -> str: + + # Check for unsupported forms + problem = "" + if not req.name: + problem = "Unnamed requirements are not allowed as constraints" + elif req.editable: + problem = "Editable requirements are not allowed as constraints" + elif req.extras: + problem = "Constraints cannot have extras" + + if problem: + deprecated( + reason=( + "Constraints are only allowed to take the form of a package " + "name and a version specifier. Other forms were originally " + "permitted as an accident of the implementation, but were " + "undocumented. The new implementation of the resolver no " + "longer supports these forms." + ), + replacement="replacing the constraint with a requirement", + # No plan yet for when the new resolver becomes default + gone_in=None, + issue=8210, + ) + + return problem diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/req_set.py b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_set.py new file mode 100644 index 0000000..6626c37 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_set.py @@ -0,0 +1,189 @@ +import logging +from collections import OrderedDict +from typing import Dict, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InstallationError +from pip._internal.models.wheel import Wheel +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils import compatibility_tags + +logger = logging.getLogger(__name__) + + +class RequirementSet: + def __init__(self, check_supported_wheels: bool = True) -> None: + """Create a RequirementSet.""" + + self.requirements: Dict[str, InstallRequirement] = OrderedDict() + self.check_supported_wheels = check_supported_wheels + + self.unnamed_requirements: List[InstallRequirement] = [] + + def __str__(self) -> str: + requirements = sorted( + (req for req in self.requirements.values() if not req.comes_from), + key=lambda req: canonicalize_name(req.name or ""), + ) + return " ".join(str(req.req) for req in requirements) + + def __repr__(self) -> str: + requirements = sorted( + self.requirements.values(), + key=lambda req: canonicalize_name(req.name or ""), + ) + + format_string = "<{classname} object; {count} requirement(s): {reqs}>" + return format_string.format( + classname=self.__class__.__name__, + count=len(requirements), + reqs=", ".join(str(req.req) for req in requirements), 
+ ) + + def add_unnamed_requirement(self, install_req: InstallRequirement) -> None: + assert not install_req.name + self.unnamed_requirements.append(install_req) + + def add_named_requirement(self, install_req: InstallRequirement) -> None: + assert install_req.name + + project_name = canonicalize_name(install_req.name) + self.requirements[project_name] = install_req + + def add_requirement( + self, + install_req: InstallRequirement, + parent_req_name: Optional[str] = None, + extras_requested: Optional[Iterable[str]] = None, + ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]: + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environment markers. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + # If the markers do not match, ignore this requirement. + if not install_req.match_markers(extras_requested): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + install_req.name, + install_req.markers, + ) + return [], None + + # If the wheel is not supported, raise an error. + # Should check this after filtering out based on environment markers to + # allow specifying different wheels based on the environment/OS, in a + # single requirements file. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + tags = compatibility_tags.get_supported() + if self.check_supported_wheels and not wheel.supported(tags): + raise InstallationError( + "{} is not a supported wheel on this platform.".format( + wheel.filename + ) + ) + + # This next bit is really a sanity check. + assert ( + not install_req.user_supplied or parent_req_name is None + ), "a user supplied req shouldn't have a parent" + + # Unnamed requirements are scanned again and the requirement won't be + # added as a dependency until after scanning. + if not install_req.name: + self.add_unnamed_requirement(install_req) + return [install_req], None + + try: + existing_req: Optional[InstallRequirement] = self.get_requirement( + install_req.name + ) + except KeyError: + existing_req = None + + has_conflicting_requirement = ( + parent_req_name is None + and existing_req + and not existing_req.constraint + and existing_req.extras == install_req.extras + and existing_req.req + and install_req.req + and existing_req.req.specifier != install_req.req.specifier + ) + if has_conflicting_requirement: + raise InstallationError( + "Double requirement given: {} (already in {}, name={!r})".format( + install_req, existing_req, install_req.name + ) + ) + + # When no existing requirement exists, add the requirement as a + # dependency and it will be scanned again after. + if not existing_req: + self.add_named_requirement(install_req) + # We'd want to rescan this requirement later + return [install_req], install_req + + # Assume there's no need to scan, and that we've already + # encountered this for scanning. 
+ if install_req.constraint or not existing_req.constraint: + return [], existing_req + + does_not_satisfy_constraint = install_req.link and not ( + existing_req.link and install_req.link.path == existing_req.link.path + ) + if does_not_satisfy_constraint: + raise InstallationError( + "Could not satisfy constraints for '{}': " + "installation from path or url cannot be " + "constrained to a version".format(install_req.name) + ) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + # If we're now installing a user supplied requirement, + # mark the existing object as such. + if install_req.user_supplied: + existing_req.user_supplied = True + existing_req.extras = tuple( + sorted(set(existing_req.extras) | set(install_req.extras)) + ) + logger.debug( + "Setting %s extras to: %s", + existing_req, + existing_req.extras, + ) + # Return the existing requirement for addition to the parent and + # scanning again. + return [existing_req], existing_req + + def has_requirement(self, name: str) -> bool: + project_name = canonicalize_name(name) + + return ( + project_name in self.requirements + and not self.requirements[project_name].constraint + ) + + def get_requirement(self, name: str) -> InstallRequirement: + project_name = canonicalize_name(name) + + if project_name in self.requirements: + return self.requirements[project_name] + + raise KeyError(f"No project with the name {name!r}") + + @property + def all_requirements(self) -> List[InstallRequirement]: + return self.unnamed_requirements + list(self.requirements.values()) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/req_tracker.py b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_tracker.py new file mode 100644 index 0000000..24d3c53 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_tracker.py @@ -0,0 +1,124 @@ +import contextlib +import hashlib +import logging +import os +from types import TracebackType +from typing import Dict, Iterator, Optional, Set, Type, Union + +from pip._internal.models.link import Link +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +@contextlib.contextmanager +def update_env_context_manager(**changes: str) -> Iterator[None]: + target = os.environ + + # Save values from the target and change them. + non_existent_marker = object() + saved_values: Dict[str, Union[object, str]] = {} + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. 
+ for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_requirement_tracker() -> Iterator["RequirementTracker"]: + root = os.environ.get("PIP_REQ_TRACKER") + with contextlib.ExitStack() as ctx: + if root is None: + root = ctx.enter_context(TempDirectory(kind="req-tracker")).path + ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with RequirementTracker(root) as tracker: + yield tracker + + +class RequirementTracker: + def __init__(self, root: str) -> None: + self._root = root + self._entries: Set[InstallRequirement] = set() + logger.debug("Created build tracker: %s", self._root) + + def __enter__(self) -> "RequirementTracker": + logger.debug("Entered build tracker: %s", self._root) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.cleanup() + + def _entry_path(self, link: Link) -> str: + hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() + return os.path.join(self._root, hashed) + + def add(self, req: InstallRequirement) -> None: + """Add an InstallRequirement to build tracking.""" + + assert req.link + # Get the file to write information about this requirement. + entry_path = self._entry_path(req.link) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. + try: + with open(entry_path) as fp: + contents = fp.read() + except FileNotFoundError: + pass + else: + message = "{} is already being built: {}".format(req.link, contents) + raise LookupError(message) + + # If we're here, req should really not be building already. + assert req not in self._entries + + # Start tracking this requirement. + with open(entry_path, "w", encoding="utf-8") as fp: + fp.write(str(req)) + self._entries.add(req) + + logger.debug("Added %s to build tracker %r", req, self._root) + + def remove(self, req: InstallRequirement) -> None: + """Remove an InstallRequirement from build tracking.""" + + assert req.link + # Delete the created file and the corresponding entries. 
+ os.unlink(self._entry_path(req.link)) + self._entries.remove(req) + + logger.debug("Removed %s from build tracker %r", req, self._root) + + def cleanup(self) -> None: + for req in set(self._entries): + self.remove(req) + + logger.debug("Removed build tracker: %r", self._root) + + @contextlib.contextmanager + def track(self, req: InstallRequirement) -> Iterator[None]: + self.add(req) + yield + self.remove(req) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/req/req_uninstall.py b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_uninstall.py new file mode 100644 index 0000000..779e93b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/req/req_uninstall.py @@ -0,0 +1,633 @@ +import csv +import functools +import os +import sys +import sysconfig +from importlib.util import cache_from_source +from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple + +from pip._vendor import pkg_resources +from pip._vendor.pkg_resources import Distribution + +from pip._internal.exceptions import UninstallationError +from pip._internal.locations import get_bin_prefix, get_bin_user +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.egg_link import egg_link_path_from_location +from pip._internal.utils.logging import getLogger, indent_log +from pip._internal.utils.misc import ( + ask, + dist_in_usersite, + dist_is_local, + is_local, + normalize_path, + renames, + rmtree, +) +from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory + +logger = getLogger(__name__) + + +def _script_names(dist: Distribution, script_name: str, is_gui: bool) -> List[str]: + """Create the fully qualified name of the files created by + {console,gui}_scripts for the given ``dist``. + Returns the list of file names + """ + if dist_in_usersite(dist): + bin_dir = get_bin_user() + else: + bin_dir = get_bin_prefix() + exe_name = os.path.join(bin_dir, script_name) + paths_to_remove = [exe_name] + if WINDOWS: + paths_to_remove.append(exe_name + ".exe") + paths_to_remove.append(exe_name + ".exe.manifest") + if is_gui: + paths_to_remove.append(exe_name + "-script.pyw") + else: + paths_to_remove.append(exe_name + "-script.py") + return paths_to_remove + + +def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]: + @functools.wraps(fn) + def unique(*args: Any, **kw: Any) -> Iterator[Any]: + seen: Set[Any] = set() + for item in fn(*args, **kw): + if item not in seen: + seen.add(item) + yield item + + return unique + + +@_unique +def uninstallation_paths(dist: Distribution) -> Iterator[str]: + """ + Yield all the uninstallation paths for dist based on RECORD-without-.py[co] + + Yield paths to all the files in RECORD. For each .py file in RECORD, add + the .pyc and .pyo in the same directory. + + UninstallPathSet.add() takes care of the __pycache__ .py[co]. + + If RECORD is not found, raises UninstallationError, + with possible information from the INSTALLER file. 
+ + https://packaging.python.org/specifications/recording-installed-packages/ + """ + try: + r = csv.reader(dist.get_metadata_lines("RECORD")) + except FileNotFoundError as missing_record_exception: + msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist) + try: + installer = next(dist.get_metadata_lines("INSTALLER")) + if not installer or installer == "pip": + raise ValueError() + except (OSError, StopIteration, ValueError): + dep = "{}=={}".format(dist.project_name, dist.version) + msg += ( + " You might be able to recover from this via: " + "'pip install --force-reinstall --no-deps {}'.".format(dep) + ) + else: + msg += " Hint: The package was installed by {}.".format(installer) + raise UninstallationError(msg) from missing_record_exception + for row in r: + path = os.path.join(dist.location, row[0]) + yield path + if path.endswith(".py"): + dn, fn = os.path.split(path) + base = fn[:-3] + path = os.path.join(dn, base + ".pyc") + yield path + path = os.path.join(dn, base + ".pyo") + yield path + + +def compact(paths: Iterable[str]) -> Set[str]: + """Compact a path set to contain the minimal number of paths + necessary to contain all paths in the set. If /a/path/ and + /a/path/to/a/file.txt are both in the set, leave only the + shorter path.""" + + sep = os.path.sep + short_paths: Set[str] = set() + for path in sorted(paths, key=len): + should_skip = any( + path.startswith(shortpath.rstrip("*")) + and path[len(shortpath.rstrip("*").rstrip(sep))] == sep + for shortpath in short_paths + ) + if not should_skip: + short_paths.add(path) + return short_paths + + +def compress_for_rename(paths: Iterable[str]) -> Set[str]: + """Returns a set containing the paths that need to be renamed. + + This set may include directories when the original sequence of paths + included every file on disk. + """ + case_map = {os.path.normcase(p): p for p in paths} + remaining = set(case_map) + unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len) + wildcards: Set[str] = set() + + def norm_join(*a: str) -> str: + return os.path.normcase(os.path.join(*a)) + + for root in unchecked: + if any(os.path.normcase(root).startswith(w) for w in wildcards): + # This directory has already been handled. + continue + + all_files: Set[str] = set() + all_subdirs: Set[str] = set() + for dirname, subdirs, files in os.walk(root): + all_subdirs.update(norm_join(root, dirname, d) for d in subdirs) + all_files.update(norm_join(root, dirname, f) for f in files) + # If all the files we found are in our remaining set of files to + # remove, then remove them from the latter set and add a wildcard + # for the directory. + if not (all_files - remaining): + remaining.difference_update(all_files) + wildcards.add(root + os.sep) + + return set(map(case_map.__getitem__, remaining)) | wildcards + + +def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]: + """Returns a tuple of 2 sets of which paths to display to user + + The first set contains paths that would be deleted. Files of a package + are not added and the top-level directory of the package has a '*' added + at the end - to signify that all it's contents are removed. + + The second set contains files that would have been skipped in the above + folders. 
+ """ + + will_remove = set(paths) + will_skip = set() + + # Determine folders and files + folders = set() + files = set() + for path in will_remove: + if path.endswith(".pyc"): + continue + if path.endswith("__init__.py") or ".dist-info" in path: + folders.add(os.path.dirname(path)) + files.add(path) + + # probably this one https://github.com/python/mypy/issues/390 + _normcased_files = set(map(os.path.normcase, files)) # type: ignore + + folders = compact(folders) + + # This walks the tree using os.walk to not miss extra folders + # that might get added. + for folder in folders: + for dirpath, _, dirfiles in os.walk(folder): + for fname in dirfiles: + if fname.endswith(".pyc"): + continue + + file_ = os.path.join(dirpath, fname) + if ( + os.path.isfile(file_) + and os.path.normcase(file_) not in _normcased_files + ): + # We are skipping this file. Add it to the set. + will_skip.add(file_) + + will_remove = files | {os.path.join(folder, "*") for folder in folders} + + return will_remove, will_skip + + +class StashedUninstallPathSet: + """A set of file rename operations to stash files while + tentatively uninstalling them.""" + + def __init__(self) -> None: + # Mapping from source file root to [Adjacent]TempDirectory + # for files under that directory. + self._save_dirs: Dict[str, TempDirectory] = {} + # (old path, new path) tuples for each move that may need + # to be undone. + self._moves: List[Tuple[str, str]] = [] + + def _get_directory_stash(self, path: str) -> str: + """Stashes a directory. + + Directories are stashed adjacent to their original location if + possible, or else moved/copied into the user's temp dir.""" + + try: + save_dir: TempDirectory = AdjacentTempDirectory(path) + except OSError: + save_dir = TempDirectory(kind="uninstall") + self._save_dirs[os.path.normcase(path)] = save_dir + + return save_dir.path + + def _get_file_stash(self, path: str) -> str: + """Stashes a file. + + If no root has been provided, one will be created for the directory + in the user's temp directory.""" + path = os.path.normcase(path) + head, old_head = os.path.dirname(path), None + save_dir = None + + while head != old_head: + try: + save_dir = self._save_dirs[head] + break + except KeyError: + pass + head, old_head = os.path.dirname(head), head + else: + # Did not find any suitable root + head = os.path.dirname(path) + save_dir = TempDirectory(kind="uninstall") + self._save_dirs[head] = save_dir + + relpath = os.path.relpath(path, head) + if relpath and relpath != os.path.curdir: + return os.path.join(save_dir.path, relpath) + return save_dir.path + + def stash(self, path: str) -> str: + """Stashes the directory or file and returns its new location. + Handle symlinks as files to avoid modifying the symlink targets. + """ + path_is_dir = os.path.isdir(path) and not os.path.islink(path) + if path_is_dir: + new_path = self._get_directory_stash(path) + else: + new_path = self._get_file_stash(path) + + self._moves.append((path, new_path)) + if path_is_dir and os.path.isdir(new_path): + # If we're moving a directory, we need to + # remove the destination first or else it will be + # moved to inside the existing directory. + # We just created new_path ourselves, so it will + # be removable. 
+ os.rmdir(new_path) + renames(path, new_path) + return new_path + + def commit(self) -> None: + """Commits the uninstall by removing stashed files.""" + for _, save_dir in self._save_dirs.items(): + save_dir.cleanup() + self._moves = [] + self._save_dirs = {} + + def rollback(self) -> None: + """Undoes the uninstall by moving stashed files back.""" + for p in self._moves: + logger.info("Moving to %s\n from %s", *p) + + for new_path, path in self._moves: + try: + logger.debug("Replacing %s from %s", new_path, path) + if os.path.isfile(new_path) or os.path.islink(new_path): + os.unlink(new_path) + elif os.path.isdir(new_path): + rmtree(new_path) + renames(path, new_path) + except OSError as ex: + logger.error("Failed to restore %s", new_path) + logger.debug("Exception: %s", ex) + + self.commit() + + @property + def can_rollback(self) -> bool: + return bool(self._moves) + + +class UninstallPathSet: + """A set of file paths to be removed in the uninstallation of a + requirement.""" + + def __init__(self, dist: Distribution) -> None: + self.paths: Set[str] = set() + self._refuse: Set[str] = set() + self.pth: Dict[str, UninstallPthEntries] = {} + self.dist = dist + self._moved_paths = StashedUninstallPathSet() + + def _permitted(self, path: str) -> bool: + """ + Return True if the given path is one we are permitted to + remove/modify, False otherwise. + + """ + return is_local(path) + + def add(self, path: str) -> None: + head, tail = os.path.split(path) + + # we normalize the head to resolve parent directory symlinks, but not + # the tail, since we only want to uninstall symlinks, not their targets + path = os.path.join(normalize_path(head), os.path.normcase(tail)) + + if not os.path.exists(path): + return + if self._permitted(path): + self.paths.add(path) + else: + self._refuse.add(path) + + # __pycache__ files can show up after 'installed-files.txt' is created, + # due to imports + if os.path.splitext(path)[1] == ".py": + self.add(cache_from_source(path)) + + def add_pth(self, pth_file: str, entry: str) -> None: + pth_file = normalize_path(pth_file) + if self._permitted(pth_file): + if pth_file not in self.pth: + self.pth[pth_file] = UninstallPthEntries(pth_file) + self.pth[pth_file].add(entry) + else: + self._refuse.add(pth_file) + + def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None: + """Remove paths in ``self.paths`` with confirmation (unless + ``auto_confirm`` is True).""" + + if not self.paths: + logger.info( + "Can't uninstall '%s'. 
No files were found to uninstall.", + self.dist.project_name, + ) + return + + dist_name_version = self.dist.project_name + "-" + self.dist.version + logger.info("Uninstalling %s:", dist_name_version) + + with indent_log(): + if auto_confirm or self._allowed_to_proceed(verbose): + moved = self._moved_paths + + for_rename = compress_for_rename(self.paths) + + for path in sorted(compact(for_rename)): + moved.stash(path) + logger.verbose("Removing file or directory %s", path) + + for pth in self.pth.values(): + pth.remove() + + logger.info("Successfully uninstalled %s", dist_name_version) + + def _allowed_to_proceed(self, verbose: bool) -> bool: + """Display which files would be deleted and prompt for confirmation""" + + def _display(msg: str, paths: Iterable[str]) -> None: + if not paths: + return + + logger.info(msg) + with indent_log(): + for path in sorted(compact(paths)): + logger.info(path) + + if not verbose: + will_remove, will_skip = compress_for_output_listing(self.paths) + else: + # In verbose mode, display all the files that are going to be + # deleted. + will_remove = set(self.paths) + will_skip = set() + + _display("Would remove:", will_remove) + _display("Would not remove (might be manually added):", will_skip) + _display("Would not remove (outside of prefix):", self._refuse) + if verbose: + _display("Will actually move:", compress_for_rename(self.paths)) + + return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n" + + def rollback(self) -> None: + """Rollback the changes previously made by remove().""" + if not self._moved_paths.can_rollback: + logger.error( + "Can't roll back %s; was not uninstalled", + self.dist.project_name, + ) + return + logger.info("Rolling back uninstall of %s", self.dist.project_name) + self._moved_paths.rollback() + for pth in self.pth.values(): + pth.rollback() + + def commit(self) -> None: + """Remove temporary save dir: rollback will no longer be possible.""" + self._moved_paths.commit() + + @classmethod + def from_dist(cls, dist: Distribution) -> "UninstallPathSet": + dist_path = normalize_path(dist.location) + if not dist_is_local(dist): + logger.info( + "Not uninstalling %s at %s, outside environment %s", + dist.key, + dist_path, + sys.prefix, + ) + return cls(dist) + + if dist_path in { + p + for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")} + if p + }: + logger.info( + "Not uninstalling %s at %s, as it is in the standard library.", + dist.key, + dist_path, + ) + return cls(dist) + + paths_to_remove = cls(dist) + develop_egg_link = egg_link_path_from_location(dist.project_name) + develop_egg_link_egg_info = "{}.egg-info".format( + pkg_resources.to_filename(dist.project_name) + ) + egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info) + # Special case for distutils installed package + distutils_egg_info = getattr(dist._provider, "path", None) + + # Uninstall cases order do matter as in the case of 2 installs of the + # same package, pip needs to uninstall the currently detected version + if ( + egg_info_exists + and dist.egg_info.endswith(".egg-info") + and not dist.egg_info.endswith(develop_egg_link_egg_info) + ): + # if dist.egg_info.endswith(develop_egg_link_egg_info), we + # are in fact in the develop_egg_link case + paths_to_remove.add(dist.egg_info) + if dist.has_metadata("installed-files.txt"): + for installed_file in dist.get_metadata( + "installed-files.txt" + ).splitlines(): + path = os.path.normpath(os.path.join(dist.egg_info, installed_file)) + paths_to_remove.add(path) + # FIXME: need a test for this 
elif block + # occurs with --single-version-externally-managed/--record outside + # of pip + elif dist.has_metadata("top_level.txt"): + if dist.has_metadata("namespace_packages.txt"): + namespaces = dist.get_metadata("namespace_packages.txt") + else: + namespaces = [] + for top_level_pkg in [ + p + for p in dist.get_metadata("top_level.txt").splitlines() + if p and p not in namespaces + ]: + path = os.path.join(dist.location, top_level_pkg) + paths_to_remove.add(path) + paths_to_remove.add(path + ".py") + paths_to_remove.add(path + ".pyc") + paths_to_remove.add(path + ".pyo") + + elif distutils_egg_info: + raise UninstallationError( + "Cannot uninstall {!r}. It is a distutils installed project " + "and thus we cannot accurately determine which files belong " + "to it which would lead to only a partial uninstall.".format( + dist.project_name, + ) + ) + + elif dist.location.endswith(".egg"): + # package installed by easy_install + # We cannot match on dist.egg_name because it can slightly vary + # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg + paths_to_remove.add(dist.location) + easy_install_egg = os.path.split(dist.location)[1] + easy_install_pth = os.path.join( + os.path.dirname(dist.location), "easy-install.pth" + ) + paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg) + + elif egg_info_exists and dist.egg_info.endswith(".dist-info"): + for path in uninstallation_paths(dist): + paths_to_remove.add(path) + + elif develop_egg_link: + # develop egg + with open(develop_egg_link) as fh: + link_pointer = os.path.normcase(fh.readline().strip()) + assert ( + link_pointer == dist.location + ), "Egg-link {} does not match installed location of {} (at {})".format( + link_pointer, dist.project_name, dist.location + ) + paths_to_remove.add(develop_egg_link) + easy_install_pth = os.path.join( + os.path.dirname(develop_egg_link), "easy-install.pth" + ) + paths_to_remove.add_pth(easy_install_pth, dist.location) + + else: + logger.debug( + "Not sure how to uninstall: %s - Check: %s", + dist, + dist.location, + ) + + # find distutils scripts= scripts + if dist.has_metadata("scripts") and dist.metadata_isdir("scripts"): + for script in dist.metadata_listdir("scripts"): + if dist_in_usersite(dist): + bin_dir = get_bin_user() + else: + bin_dir = get_bin_prefix() + paths_to_remove.add(os.path.join(bin_dir, script)) + if WINDOWS: + paths_to_remove.add(os.path.join(bin_dir, script) + ".bat") + + # find console_scripts + _scripts_to_remove = [] + console_scripts = dist.get_entry_map(group="console_scripts") + for name in console_scripts.keys(): + _scripts_to_remove.extend(_script_names(dist, name, False)) + # find gui_scripts + gui_scripts = dist.get_entry_map(group="gui_scripts") + for name in gui_scripts.keys(): + _scripts_to_remove.extend(_script_names(dist, name, True)) + + for s in _scripts_to_remove: + paths_to_remove.add(s) + + return paths_to_remove + + +class UninstallPthEntries: + def __init__(self, pth_file: str) -> None: + self.file = pth_file + self.entries: Set[str] = set() + self._saved_lines: Optional[List[bytes]] = None + + def add(self, entry: str) -> None: + entry = os.path.normcase(entry) + # On Windows, os.path.normcase converts the entry to use + # backslashes. This is correct for entries that describe absolute + # paths outside of site-packages, but all the others use forward + # slashes. + # os.path.splitdrive is used instead of os.path.isabs because isabs + # treats non-absolute paths with drive letter markings like c:foo\bar + # as absolute paths. 
It also does not recognize UNC paths if they don't + # have more than "\\sever\share". Valid examples: "\\server\share\" or + # "\\server\share\folder". + if WINDOWS and not os.path.splitdrive(entry)[0]: + entry = entry.replace("\\", "/") + self.entries.add(entry) + + def remove(self) -> None: + logger.verbose("Removing pth entries from %s:", self.file) + + # If the file doesn't exist, log a warning and return + if not os.path.isfile(self.file): + logger.warning("Cannot remove entries from nonexistent file %s", self.file) + return + with open(self.file, "rb") as fh: + # windows uses '\r\n' with py3k, but uses '\n' with py2.x + lines = fh.readlines() + self._saved_lines = lines + if any(b"\r\n" in line for line in lines): + endline = "\r\n" + else: + endline = "\n" + # handle missing trailing newline + if lines and not lines[-1].endswith(endline.encode("utf-8")): + lines[-1] = lines[-1] + endline.encode("utf-8") + for entry in self.entries: + try: + logger.verbose("Removing entry: %s", entry) + lines.remove((entry + endline).encode("utf-8")) + except ValueError: + pass + with open(self.file, "wb") as fh: + fh.writelines(lines) + + def rollback(self) -> bool: + if self._saved_lines is None: + logger.error("Cannot roll back changes to %s, none were made", self.file) + return False + logger.debug("Rolling %s back to previous state", self.file) + with open(self.file, "wb") as fh: + fh.writelines(self._saved_lines) + return True diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b154c34 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..1a195bd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/base.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/base.py new file mode 100644 index 0000000..42dade1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/base.py @@ -0,0 +1,20 @@ +from typing import Callable, List, Optional + +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet + +InstallRequirementProvider = Callable[ + [str, Optional[InstallRequirement]], InstallRequirement +] + + +class BaseResolver: + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + raise NotImplementedError() + + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: + raise NotImplementedError() diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..c68790f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc new file mode 100644 index 0000000..faf05a6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/resolver.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/resolver.py new file mode 100644 index 0000000..09caaa6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/resolver.py @@ -0,0 +1,467 @@ +"""Dependency Resolution + +The dependency resolution in pip is performed as follows: + +for top-level requirements: + a. only one spec allowed per project, regardless of conflicts or not. + otherwise a "double requirement" exception is raised + b. they override sub-dependency requirements. +for sub-dependencies + a. "first found, wins" (where the order is breadth first) +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import sys +from collections import defaultdict +from itertools import chain +from typing import DefaultDict, Iterable, List, Optional, Set, Tuple + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.requirements import Requirement + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + HashError, + HashErrors, + NoneMetadataError, + UnsupportedPythonVersion, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import dist_in_usersite, normalize_version_info +from pip._internal.utils.packaging import check_requires_python + +logger = logging.getLogger(__name__) + +DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] + + +def _check_dist_requires_python( + dist: BaseDistribution, + version_info: Tuple[int, int, int], + ignore_requires_python: bool = False, +) -> None: + """ + Check whether the given Python version is compatible with a distribution's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + + :raises UnsupportedPythonVersion: When the given Python version isn't + compatible. 
+ """ + # This idiosyncratically converts the SpecifierSet to str and let + # check_requires_python then parse it again into SpecifierSet. But this + # is the legacy resolver so I'm just not going to bother refactoring. + try: + requires_python = str(dist.requires_python) + except FileNotFoundError as e: + raise NoneMetadataError(dist, str(e)) + try: + is_compatible = check_requires_python( + requires_python, + version_info=version_info, + ) + except specifiers.InvalidSpecifier as exc: + logger.warning( + "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc + ) + return + + if is_compatible: + return + + version = ".".join(map(str, version_info)) + if ignore_requires_python: + logger.debug( + "Ignoring failed Requires-Python check for package %r: %s not in %r", + dist.raw_name, + version, + requires_python, + ) + return + + raise UnsupportedPythonVersion( + "Package {!r} requires a different Python: {} not in {!r}".format( + dist.raw_name, version, requires_python + ) + ) + + +class Resolver(BaseResolver): + """Resolves which packages need to be installed/uninstalled to perform \ + the requested operation without breaking the requirements of any package. + """ + + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: Optional[WheelCache], + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> None: + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + self._py_version_info = py_version_info + + self.preparer = preparer + self.finder = finder + self.wheel_cache = wheel_cache + + self.upgrade_strategy = upgrade_strategy + self.force_reinstall = force_reinstall + self.ignore_dependencies = ignore_dependencies + self.ignore_installed = ignore_installed + self.ignore_requires_python = ignore_requires_python + self.use_user_site = use_user_site + self._make_install_req = make_install_req + + self._discovered_dependencies: DiscoveredDependencies = defaultdict(list) + + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + """Resolve what operations need to be done + + As a side-effect of this method, the packages (and their dependencies) + are downloaded, unpacked and prepared for installation. This + preparation is done by ``pip.operations.prepare``. + + Once PyPI has static dependency metadata available, it would be + possible to move the preparation to become a step separated from + dependency resolution. + """ + requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for req in root_reqs: + if req.constraint: + check_invalid_constraint_type(req) + requirement_set.add_requirement(req) + + # Actually prepare the files, and collect any exceptions. Most hash + # exceptions cannot be checked ahead of time, because + # _populate_link() needs to be called before we can make decisions + # based on link type. 
+ discovered_reqs: List[InstallRequirement] = [] + hash_errors = HashErrors() + for req in chain(requirement_set.all_requirements, discovered_reqs): + try: + discovered_reqs.extend(self._resolve_one(requirement_set, req)) + except HashError as exc: + exc.req = req + hash_errors.append(exc) + + if hash_errors: + raise hash_errors + + return requirement_set + + def _is_upgrade_allowed(self, req: InstallRequirement) -> bool: + if self.upgrade_strategy == "to-satisfy-only": + return False + elif self.upgrade_strategy == "eager": + return True + else: + assert self.upgrade_strategy == "only-if-needed" + return req.user_supplied or req.constraint + + def _set_req_to_reinstall(self, req: InstallRequirement) -> None: + """ + Set a requirement to be installed. + """ + # Don't uninstall the conflict if doing a user install and the + # conflict is not a user install. + if not self.use_user_site or dist_in_usersite(req.satisfied_by): + req.should_reinstall = True + req.satisfied_by = None + + def _check_skip_installed( + self, req_to_install: InstallRequirement + ) -> Optional[str]: + """Check if req_to_install should be skipped. + + This will check if the req is installed, and whether we should upgrade + or reinstall it, taking into account all the relevant user options. + + After calling this req_to_install will only have satisfied_by set to + None if the req_to_install is to be upgraded/reinstalled etc. Any + other value will be a dist recording the current thing installed that + satisfies the requirement. + + Note that for vcs urls and the like we can't assess skipping in this + routine - we simply identify that we need to pull the thing down, + then later on it is pulled down and introspected to assess upgrade/ + reinstalls etc. + + :return: A text reason for why it was skipped, or None. + """ + if self.ignore_installed: + return None + + req_to_install.check_if_exists(self.use_user_site) + if not req_to_install.satisfied_by: + return None + + if self.force_reinstall: + self._set_req_to_reinstall(req_to_install) + return None + + if not self._is_upgrade_allowed(req_to_install): + if self.upgrade_strategy == "only-if-needed": + return "already satisfied, skipping upgrade" + return "already satisfied" + + # Check for the possibility of an upgrade. For link-based + # requirements we have to pull the tree down and inspect to assess + # the version #, so it's handled way down. + if not req_to_install.link: + try: + self.finder.find_requirement(req_to_install, upgrade=True) + except BestVersionAlreadyInstalled: + # Then the best version is installed. + return "already up-to-date" + except DistributionNotFound: + # No distribution found, so we squash the error. It will + # be raised later when we re-try later to do the install. + # Why don't we just raise here? + pass + + self._set_req_to_reinstall(req_to_install) + return None + + def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]: + upgrade = self._is_upgrade_allowed(req) + best_candidate = self.finder.find_requirement(req, upgrade) + if not best_candidate: + return None + + # Log a warning per PEP 592 if necessary before returning. + link = best_candidate.link + if link.is_yanked: + reason = link.yanked_reason or "" + msg = ( + # Mark this as a unicode string to prevent + # "UnicodeEncodeError: 'ascii' codec can't encode character" + # in Python 2 when the reason contains non-ascii characters. 
+ "The candidate selected for download or install is a " + "yanked version: {candidate}\n" + "Reason for being yanked: {reason}" + ).format(candidate=best_candidate, reason=reason) + logger.warning(msg) + + return link + + def _populate_link(self, req: InstallRequirement) -> None: + """Ensure that if a link can be found for this, that it is found. + + Note that req.link may still be None - if the requirement is already + installed and not needed to be upgraded based on the return value of + _is_upgrade_allowed(). + + If preparer.require_hashes is True, don't use the wheel cache, because + cached wheels, always built locally, have different hashes than the + files downloaded from the index server and thus throw false hash + mismatches. Furthermore, cached wheels at present have undeterministic + contents due to file modification times. + """ + if req.link is None: + req.link = self._find_requirement_link(req) + + if self.wheel_cache is None or self.preparer.require_hashes: + return + cache_entry = self.wheel_cache.get_cache_entry( + link=req.link, + package_name=req.name, + supported_tags=get_supported(), + ) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + if req.link is req.original_link and cache_entry.persistent: + req.original_link_is_in_wheel_cache = True + req.link = cache_entry.link + + def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution: + """Takes a InstallRequirement and returns a single AbstractDist \ + representing a prepared variant of the same. + """ + if req.editable: + return self.preparer.prepare_editable_requirement(req) + + # satisfied_by is only evaluated by calling _check_skip_installed, + # so it must be None here. + assert req.satisfied_by is None + skip_reason = self._check_skip_installed(req) + + if req.satisfied_by: + return self.preparer.prepare_installed_requirement(req, skip_reason) + + # We eagerly populate the link, since that's our "legacy" behavior. + self._populate_link(req) + dist = self.preparer.prepare_linked_requirement(req) + + # NOTE + # The following portion is for determining if a certain package is + # going to be re-installed/upgraded or not and reporting to the user. + # This should probably get cleaned up in a future refactor. + + # req.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req.check_if_exists(self.use_user_site) + + if req.satisfied_by: + should_modify = ( + self.upgrade_strategy != "to-satisfy-only" + or self.force_reinstall + or self.ignore_installed + or req.link.scheme == "file" + ) + if should_modify: + self._set_req_to_reinstall(req) + else: + logger.info( + "Requirement already satisfied (use --upgrade to upgrade): %s", + req, + ) + return dist + + def _resolve_one( + self, + requirement_set: RequirementSet, + req_to_install: InstallRequirement, + ) -> List[InstallRequirement]: + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. + """ + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) + if req_to_install.constraint or req_to_install.prepared: + return [] + + req_to_install.prepared = True + + # Parse and return dependencies + dist = self._get_dist_for(req_to_install) + # This will raise UnsupportedPythonVersion if the given Python + # version isn't compatible with the distribution's Requires-Python. 
+ _check_dist_requires_python( + dist, + version_info=self._py_version_info, + ignore_requires_python=self.ignore_requires_python, + ) + + more_reqs: List[InstallRequirement] = [] + + def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None: + # This idiosyncratically converts the Requirement to str and let + # make_install_req then parse it again into Requirement. But this is + # the legacy resolver so I'm just not going to bother refactoring. + sub_install_req = self._make_install_req(str(subreq), req_to_install) + parent_req_name = req_to_install.name + to_scan_again, add_to_parent = requirement_set.add_requirement( + sub_install_req, + parent_req_name=parent_req_name, + extras_requested=extras_requested, + ) + if parent_req_name and add_to_parent: + self._discovered_dependencies[parent_req_name].append(add_to_parent) + more_reqs.extend(to_scan_again) + + with indent_log(): + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. + if not requirement_set.has_requirement(req_to_install.name): + # 'unnamed' requirements will get added here + # 'unnamed' requirements can only come from being directly + # provided by the user. + assert req_to_install.user_supplied + requirement_set.add_requirement(req_to_install, parent_req_name=None) + + if not self.ignore_dependencies: + if req_to_install.extras: + logger.debug( + "Installing extra requirements: %r", + ",".join(req_to_install.extras), + ) + missing_requested = sorted( + set(req_to_install.extras) - set(dist.iter_provided_extras()) + ) + for missing in missing_requested: + logger.warning( + "%s %s does not provide the extra '%s'", + dist.raw_name, + dist.version, + missing, + ) + + available_requested = sorted( + set(dist.iter_provided_extras()) & set(req_to_install.extras) + ) + for subreq in dist.iter_dependencies(available_requested): + add_req(subreq, extras_requested=available_requested) + + return more_reqs + + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: + """Create the installation order. + + The installation order is topological - requirements are installed + before the requiring thing. We break cycles at an arbitrary point, + and make no other guarantees. + """ + # The current implementation, which we may change at any point + # installs the user specified things in the order given, except when + # dependencies must come earlier to achieve topological order. 
+ order = [] + ordered_reqs: Set[InstallRequirement] = set() + + def schedule(req: InstallRequirement) -> None: + if req.satisfied_by or req in ordered_reqs: + return + if req.constraint: + return + ordered_reqs.add(req) + for dep in self._discovered_dependencies[req.name]: + schedule(dep) + order.append(req) + + for install_req in req_set.requirements.values(): + schedule(install_req) + return order diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..ea74e82 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..7bf6bef Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc new file mode 100644 index 0000000..1b40061 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc new file mode 100644 index 0000000..aefb39e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc new file mode 100644 index 0000000..510cb73 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc new file mode 100644 index 0000000..d29bf41 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc new file mode 100644 index 0000000..eb99dca Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc new file mode 100644 index 0000000..a51d1cc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc new file mode 100644 index 0000000..a226725 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/base.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/base.py new file mode 100644 index 0000000..b206692 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/base.py @@ -0,0 +1,141 @@ +from typing import FrozenSet, Iterable, Optional, Tuple, Union + +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.models.link import Link, links_equivalent +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.hashes import Hashes + +CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]] +CandidateVersion = Union[LegacyVersion, Version] + + +def format_name(project: str, extras: FrozenSet[str]) -> str: + if not extras: + return project + canonical_extras = sorted(canonicalize_name(e) for e in extras) + return "{}[{}]".format(project, ",".join(canonical_extras)) + + +class Constraint: + def __init__( + self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link] + ) -> None: + self.specifier = specifier + self.hashes = hashes + self.links = links + + @classmethod + def empty(cls) -> "Constraint": + return Constraint(SpecifierSet(), Hashes(), frozenset()) + + @classmethod + def from_ireq(cls, ireq: InstallRequirement) -> "Constraint": + links = frozenset([ireq.link]) if ireq.link else frozenset() + return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links) + + def __bool__(self) -> bool: + return bool(self.specifier) or bool(self.hashes) or bool(self.links) + + def __and__(self, other: InstallRequirement) -> "Constraint": + if not isinstance(other, InstallRequirement): + return NotImplemented + specifier = self.specifier & other.specifier + hashes = self.hashes & other.hashes(trust_internet=False) + links = self.links + if other.link: + links = links.union([other.link]) + return Constraint(specifier, hashes, links) + + def is_satisfied_by(self, candidate: "Candidate") -> bool: + # Reject if there are any mismatched URL constraints on this package. + if self.links and not all(_match_link(link, candidate) for link in self.links): + return False + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
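The prerelease behaviour noted in the comment above can be seen directly with the standalone packaging library (pip vendors the same code); this snippet is only an illustration, not part of the diffed file:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=1.0")
candidate = Version("2.0a1")

# A bare specifier set rejects prereleases by default...
assert not spec.contains(candidate)
# ...so code that has already decided prereleases are acceptable opts back in.
assert spec.contains(candidate, prereleases=True)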
+ return self.specifier.contains(candidate.version, prereleases=True) + + +class Requirement: + @property + def project_name(self) -> NormalizedName: + """The "project name" of a requirement. + + This is different from ``name`` if this requirement contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Subclass should override") + + @property + def name(self) -> str: + """The name identifying this requirement in the resolver. + + This is different from ``project_name`` if this requirement contains + extras, where ``project_name`` would not contain the ``[...]`` part. + """ + raise NotImplementedError("Subclass should override") + + def is_satisfied_by(self, candidate: "Candidate") -> bool: + return False + + def get_candidate_lookup(self) -> CandidateLookup: + raise NotImplementedError("Subclass should override") + + def format_for_error(self) -> str: + raise NotImplementedError("Subclass should override") + + +def _match_link(link: Link, candidate: "Candidate") -> bool: + if candidate.source_link: + return links_equivalent(link, candidate.source_link) + return False + + +class Candidate: + @property + def project_name(self) -> NormalizedName: + """The "project name" of the candidate. + + This is different from ``name`` if this candidate contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Override in subclass") + + @property + def name(self) -> str: + """The name identifying this candidate in the resolver. + + This is different from ``project_name`` if this candidate contains + extras, where ``project_name`` would not contain the ``[...]`` part. 
+ """ + raise NotImplementedError("Override in subclass") + + @property + def version(self) -> CandidateVersion: + raise NotImplementedError("Override in subclass") + + @property + def is_installed(self) -> bool: + raise NotImplementedError("Override in subclass") + + @property + def is_editable(self) -> bool: + raise NotImplementedError("Override in subclass") + + @property + def source_link(self) -> Optional[Link]: + raise NotImplementedError("Override in subclass") + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + raise NotImplementedError("Override in subclass") + + def get_install_requirement(self) -> Optional[InstallRequirement]: + raise NotImplementedError("Override in subclass") + + def format_for_error(self) -> str: + raise NotImplementedError("Subclass should override") diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/candidates.py new file mode 100644 index 0000000..60fad55 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/candidates.py @@ -0,0 +1,540 @@ +import logging +import sys +from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast + +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import HashError, MetadataInconsistent +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link, links_equivalent +from pip._internal.models.wheel import Wheel +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.misc import normalize_version_info + +from .base import Candidate, CandidateVersion, Requirement, format_name + +if TYPE_CHECKING: + from .factory import Factory + +logger = logging.getLogger(__name__) + +BaseCandidate = Union[ + "AlreadyInstalledCandidate", + "EditableCandidate", + "LinkCandidate", +] + +# Avoid conflicting with the PyPI package "Python". 
+REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "") + + +def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]: + """The runtime version of BaseCandidate.""" + base_candidate_classes = ( + AlreadyInstalledCandidate, + EditableCandidate, + LinkCandidate, + ) + if isinstance(candidate, base_candidate_classes): + return candidate + return None + + +def make_install_req_from_link( + link: Link, template: InstallRequirement +) -> InstallRequirement: + assert not template.editable, "template is editable" + if template.req: + line = str(template.req) + else: + line = link.url + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + ireq.original_link = template.original_link + ireq.link = link + return ireq + + +def make_install_req_from_editable( + link: Link, template: InstallRequirement +) -> InstallRequirement: + assert template.editable, "template not editable" + return install_req_from_editable( + link.url, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + permit_editable_wheels=template.permit_editable_wheels, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + + +def _make_install_req_from_dist( + dist: BaseDistribution, template: InstallRequirement +) -> InstallRequirement: + from pip._internal.metadata.pkg_resources import Distribution as _Dist + + if template.req: + line = str(template.req) + elif template.link: + line = f"{dist.canonical_name} @ {template.link.url}" + else: + line = f"{dist.canonical_name}=={dist.version}" + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + ireq.satisfied_by = cast(_Dist, dist)._dist + return ireq + + +class _InstallRequirementBackedCandidate(Candidate): + """A candidate backed by an ``InstallRequirement``. + + This represents a package request with the target not being already + in the environment, and needs to be fetched and installed. The backing + ``InstallRequirement`` is responsible for most of the leg work; this + class exposes appropriate information to the resolver. + + :param link: The link passed to the ``InstallRequirement``. The backing + ``InstallRequirement`` will use this link to fetch the distribution. + :param source_link: The link this candidate "originates" from. This is + different from ``link`` when the link is found in the wheel cache. + ``link`` would point to the wheel cache, while this points to the + found remote link (e.g. from pypi.org). 
+ """ + + dist: BaseDistribution + is_installed = False + + def __init__( + self, + link: Link, + source_link: Link, + ireq: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + self._link = link + self._source_link = source_link + self._factory = factory + self._ireq = ireq + self._name = name + self._version = version + self.dist = self._prepare() + + def __str__(self) -> str: + return f"{self.name} {self.version}" + + def __repr__(self) -> str: + return "{class_name}({link!r})".format( + class_name=self.__class__.__name__, + link=str(self._link), + ) + + def __hash__(self) -> int: + return hash((self.__class__, self._link)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return links_equivalent(self._link, other._link) + return False + + @property + def source_link(self) -> Optional[Link]: + return self._source_link + + @property + def project_name(self) -> NormalizedName: + """The normalised name of the project the candidate refers to""" + if self._name is None: + self._name = self.dist.canonical_name + return self._name + + @property + def name(self) -> str: + return self.project_name + + @property + def version(self) -> CandidateVersion: + if self._version is None: + self._version = self.dist.version + return self._version + + def format_for_error(self) -> str: + return "{} {} (from {})".format( + self.name, + self.version, + self._link.file_path if self._link.is_file else self._link, + ) + + def _prepare_distribution(self) -> BaseDistribution: + raise NotImplementedError("Override in subclass") + + def _check_metadata_consistency(self, dist: BaseDistribution) -> None: + """Check for consistency of project name and version of dist.""" + if self._name is not None and self._name != dist.canonical_name: + raise MetadataInconsistent( + self._ireq, + "name", + self._name, + dist.canonical_name, + ) + if self._version is not None and self._version != dist.version: + raise MetadataInconsistent( + self._ireq, + "version", + str(self._version), + str(dist.version), + ) + + def _prepare(self) -> BaseDistribution: + try: + dist = self._prepare_distribution() + except HashError as e: + # Provide HashError the underlying ireq that caused it. This + # provides context for the resulting error message to show the + # offending line to the user. 
+ e.req = self._ireq + raise + self._check_metadata_consistency(dist) + return dist + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + requires = self.dist.iter_dependencies() if with_requires else () + for r in requires: + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield self._factory.make_requires_python_requirement(self.dist.requires_python) + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return self._ireq + + +class LinkCandidate(_InstallRequirementBackedCandidate): + is_editable = False + + def __init__( + self, + link: Link, + template: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + source_link = link + cache_entry = factory.get_wheel_cache_entry(link, name) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + link = cache_entry.link + ireq = make_install_req_from_link(link, template) + assert ireq.link == link + if ireq.link.is_wheel and not ireq.link.is_file: + wheel = Wheel(ireq.link.filename) + wheel_name = canonicalize_name(wheel.name) + assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel" + # Version may not be present for PEP 508 direct URLs + if version is not None: + wheel_version = Version(wheel.version) + assert version == wheel_version, "{!r} != {!r} for wheel {}".format( + version, wheel_version, name + ) + + if ( + cache_entry is not None + and cache_entry.persistent + and template.link is template.original_link + ): + ireq.original_link_is_in_wheel_cache = True + + super().__init__( + link=link, + source_link=source_link, + ireq=ireq, + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self) -> BaseDistribution: + preparer = self._factory.preparer + return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True) + + +class EditableCandidate(_InstallRequirementBackedCandidate): + is_editable = True + + def __init__( + self, + link: Link, + template: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + super().__init__( + link=link, + source_link=link, + ireq=make_install_req_from_editable(link, template), + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self) -> BaseDistribution: + return self._factory.preparer.prepare_editable_requirement(self._ireq) + + +class AlreadyInstalledCandidate(Candidate): + is_installed = True + source_link = None + + def __init__( + self, + dist: BaseDistribution, + template: InstallRequirement, + factory: "Factory", + ) -> None: + self.dist = dist + self._ireq = _make_install_req_from_dist(dist, template) + self._factory = factory + + # This is just logging some messages, so we can do it eagerly. + # The returned dist would be exactly the same as self.dist because we + # set satisfied_by in _make_install_req_from_dist. + # TODO: Supply reason based on force_reinstall and upgrade_strategy. 
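The wheel-filename consistency asserts in LinkCandidate above rely on the name and version encoded in a wheel's filename. With the standalone packaging library (assuming a release new enough to ship parse_wheel_filename) the same parse looks roughly like this; it is an illustration, not pip's own helper:

from packaging.utils import canonicalize_name, parse_wheel_filename

name, version, _build, _tags = parse_wheel_filename("pip-21.3.1-py3-none-any.whl")
assert name == canonicalize_name("pip")
assert str(version) == "21.3.1"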
+ skip_reason = "already satisfied" + factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) + + def __str__(self) -> str: + return str(self.dist) + + def __repr__(self) -> str: + return "{class_name}({distribution!r})".format( + class_name=self.__class__.__name__, + distribution=self.dist, + ) + + def __hash__(self) -> int: + return hash((self.__class__, self.name, self.version)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self.name == other.name and self.version == other.version + return False + + @property + def project_name(self) -> NormalizedName: + return self.dist.canonical_name + + @property + def name(self) -> str: + return self.project_name + + @property + def version(self) -> CandidateVersion: + return self.dist.version + + @property + def is_editable(self) -> bool: + return self.dist.editable + + def format_for_error(self) -> str: + return f"{self.name} {self.version} (Installed)" + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + if not with_requires: + return + for r in self.dist.iter_dependencies(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return None + + +class ExtrasCandidate(Candidate): + """A candidate that has 'extras', indicating additional dependencies. + + Requirements can be for a project with dependencies, something like + foo[extra]. The extras don't affect the project/version being installed + directly, but indicate that we need additional dependencies. We model that + by having an artificial ExtrasCandidate that wraps the "base" candidate. + + The ExtrasCandidate differs from the base in the following ways: + + 1. It has a unique name, of the form foo[extra]. This causes the resolver + to treat it as a separate node in the dependency graph. + 2. When we're getting the candidate's dependencies, + a) We specify that we want the extra dependencies as well. + b) We add a dependency on the base candidate. + See below for why this is needed. + 3. We return None for the underlying InstallRequirement, as the base + candidate will provide it, and we don't want to end up with duplicates. + + The dependency on the base candidate is needed so that the resolver can't + decide that it should recommend foo[extra1] version 1.0 and foo[extra2] + version 2.0. Having those candidates depend on foo=1.0 and foo=2.0 + respectively forces the resolver to recognise that this is a conflict. 
+ """ + + def __init__( + self, + base: BaseCandidate, + extras: FrozenSet[str], + ) -> None: + self.base = base + self.extras = extras + + def __str__(self) -> str: + name, rest = str(self.base).split(" ", 1) + return "{}[{}] {}".format(name, ",".join(self.extras), rest) + + def __repr__(self) -> str: + return "{class_name}(base={base!r}, extras={extras!r})".format( + class_name=self.__class__.__name__, + base=self.base, + extras=self.extras, + ) + + def __hash__(self) -> int: + return hash((self.base, self.extras)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self.base == other.base and self.extras == other.extras + return False + + @property + def project_name(self) -> NormalizedName: + return self.base.project_name + + @property + def name(self) -> str: + """The normalised name of the project the candidate refers to""" + return format_name(self.base.project_name, self.extras) + + @property + def version(self) -> CandidateVersion: + return self.base.version + + def format_for_error(self) -> str: + return "{} [{}]".format( + self.base.format_for_error(), ", ".join(sorted(self.extras)) + ) + + @property + def is_installed(self) -> bool: + return self.base.is_installed + + @property + def is_editable(self) -> bool: + return self.base.is_editable + + @property + def source_link(self) -> Optional[Link]: + return self.base.source_link + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + factory = self.base._factory + + # Add a dependency on the exact base + # (See note 2b in the class docstring) + yield factory.make_requirement_from_candidate(self.base) + if not with_requires: + return + + # The user may have specified extras that the candidate doesn't + # support. We ignore any unsupported extras here. + valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras()) + invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras()) + for extra in sorted(invalid_extras): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra, + ) + + for r in self.base.dist.iter_dependencies(valid_extras): + requirement = factory.make_requirement_from_spec( + str(r), self.base._ireq, valid_extras + ) + if requirement: + yield requirement + + def get_install_requirement(self) -> Optional[InstallRequirement]: + # We don't return anything here, because we always + # depend on the base candidate, and we'll get the + # install requirement from that. + return None + + +class RequiresPythonCandidate(Candidate): + is_installed = False + source_link = None + + def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None: + if py_version_info is not None: + version_info = normalize_version_info(py_version_info) + else: + version_info = sys.version_info[:3] + self._version = Version(".".join(str(c) for c in version_info)) + + # We don't need to implement __eq__() and __ne__() since there is always + # only one RequiresPythonCandidate in a resolution, i.e. the host Python. + # The built-in object.__eq__() and object.__ne__() do exactly what we want. 
+ + def __str__(self) -> str: + return f"Python {self._version}" + + @property + def project_name(self) -> NormalizedName: + return REQUIRES_PYTHON_IDENTIFIER + + @property + def name(self) -> str: + return REQUIRES_PYTHON_IDENTIFIER + + @property + def version(self) -> CandidateVersion: + return self._version + + def format_for_error(self) -> str: + return f"Python {self.version}" + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + return () + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return None diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/factory.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/factory.py new file mode 100644 index 0000000..766dc26 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/factory.py @@ -0,0 +1,701 @@ +import contextlib +import functools +import logging +from typing import ( + TYPE_CHECKING, + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Mapping, + NamedTuple, + Optional, + Sequence, + Set, + Tuple, + TypeVar, + cast, +) + +from pip._vendor.packaging.requirements import InvalidRequirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.resolvelib import ResolutionImpossible + +from pip._internal.cache import CacheEntry, WheelCache +from pip._internal.exceptions import ( + DistributionNotFound, + InstallationError, + InstallationSubprocessError, + MetadataInconsistent, + UnsupportedPythonVersion, + UnsupportedWheel, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_default_environment +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import install_req_from_link_and_ireq +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.resolution.base import InstallRequirementProvider +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import Candidate, CandidateVersion, Constraint, Requirement +from .candidates import ( + AlreadyInstalledCandidate, + BaseCandidate, + EditableCandidate, + ExtrasCandidate, + LinkCandidate, + RequiresPythonCandidate, + as_base_candidate, +) +from .found_candidates import FoundCandidates, IndexCandidateInfo +from .requirements import ( + ExplicitRequirement, + RequiresPythonRequirement, + SpecifierRequirement, + UnsatisfiableRequirement, +) + +if TYPE_CHECKING: + from typing import Protocol + + class ConflictCause(Protocol): + requirement: RequiresPythonRequirement + parent: Candidate + + +logger = logging.getLogger(__name__) + +C = TypeVar("C") +Cache = Dict[Link, C] + + +class CollectedRootRequirements(NamedTuple): + requirements: List[Requirement] + constraints: Dict[str, Constraint] + user_requested: Dict[str, int] + + +class Factory: + def __init__( + self, + finder: PackageFinder, + preparer: RequirementPreparer, + make_install_req: InstallRequirementProvider, + wheel_cache: Optional[WheelCache], + use_user_site: bool, + force_reinstall: bool, + ignore_installed: bool, + 
ignore_requires_python: bool, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> None: + self._finder = finder + self.preparer = preparer + self._wheel_cache = wheel_cache + self._python_candidate = RequiresPythonCandidate(py_version_info) + self._make_install_req_from_spec = make_install_req + self._use_user_site = use_user_site + self._force_reinstall = force_reinstall + self._ignore_requires_python = ignore_requires_python + + self._build_failures: Cache[InstallationError] = {} + self._link_candidate_cache: Cache[LinkCandidate] = {} + self._editable_candidate_cache: Cache[EditableCandidate] = {} + self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {} + self._extras_candidate_cache: Dict[ + Tuple[int, FrozenSet[str]], ExtrasCandidate + ] = {} + + if not ignore_installed: + env = get_default_environment() + self._installed_dists = { + dist.canonical_name: dist + for dist in env.iter_installed_distributions(local_only=False) + } + else: + self._installed_dists = {} + + @property + def force_reinstall(self) -> bool: + return self._force_reinstall + + def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: + if not link.is_wheel: + return + wheel = Wheel(link.filename) + if wheel.supported(self._finder.target_python.get_tags()): + return + msg = f"{link.filename} is not a supported wheel on this platform." + raise UnsupportedWheel(msg) + + def _make_extras_candidate( + self, base: BaseCandidate, extras: FrozenSet[str] + ) -> ExtrasCandidate: + cache_key = (id(base), extras) + try: + candidate = self._extras_candidate_cache[cache_key] + except KeyError: + candidate = ExtrasCandidate(base, extras) + self._extras_candidate_cache[cache_key] = candidate + return candidate + + def _make_candidate_from_dist( + self, + dist: BaseDistribution, + extras: FrozenSet[str], + template: InstallRequirement, + ) -> Candidate: + try: + base = self._installed_candidate_cache[dist.canonical_name] + except KeyError: + base = AlreadyInstalledCandidate(dist, template, factory=self) + self._installed_candidate_cache[dist.canonical_name] = base + if not extras: + return base + return self._make_extras_candidate(base, extras) + + def _make_candidate_from_link( + self, + link: Link, + extras: FrozenSet[str], + template: InstallRequirement, + name: Optional[NormalizedName], + version: Optional[CandidateVersion], + ) -> Optional[Candidate]: + # TODO: Check already installed candidate, and use it if the link and + # editable flag match. + + if link in self._build_failures: + # We already tried this candidate before, and it does not build. + # Don't bother trying again. + return None + + if template.editable: + if link not in self._editable_candidate_cache: + try: + self._editable_candidate_cache[link] = EditableCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except (InstallationSubprocessError, MetadataInconsistent) as e: + logger.warning("Discarding %s. %s", link, e) + self._build_failures[link] = e + return None + base: BaseCandidate = self._editable_candidate_cache[link] + else: + if link not in self._link_candidate_cache: + try: + self._link_candidate_cache[link] = LinkCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except (InstallationSubprocessError, MetadataInconsistent) as e: + logger.warning("Discarding %s. 
%s", link, e) + self._build_failures[link] = e + return None + base = self._link_candidate_cache[link] + + if not extras: + return base + return self._make_extras_candidate(base, extras) + + def _iter_found_candidates( + self, + ireqs: Sequence[InstallRequirement], + specifier: SpecifierSet, + hashes: Hashes, + prefers_installed: bool, + incompatible_ids: Set[int], + ) -> Iterable[Candidate]: + if not ireqs: + return () + + # The InstallRequirement implementation requires us to give it a + # "template". Here we just choose the first requirement to represent + # all of them. + # Hopefully the Project model can correct this mismatch in the future. + template = ireqs[0] + assert template.req, "Candidates found on index must be PEP 508" + name = canonicalize_name(template.req.name) + + extras: FrozenSet[str] = frozenset() + for ireq in ireqs: + assert ireq.req, "Candidates found on index must be PEP 508" + specifier &= ireq.req.specifier + hashes &= ireq.hashes(trust_internet=False) + extras |= frozenset(ireq.extras) + + def _get_installed_candidate() -> Optional[Candidate]: + """Get the candidate for the currently-installed version.""" + # If --force-reinstall is set, we want the version from the index + # instead, so we "pretend" there is nothing installed. + if self._force_reinstall: + return None + try: + installed_dist = self._installed_dists[name] + except KeyError: + return None + # Don't use the installed distribution if its version does not fit + # the current dependency graph. + if not specifier.contains(installed_dist.version, prereleases=True): + return None + candidate = self._make_candidate_from_dist( + dist=installed_dist, + extras=extras, + template=template, + ) + # The candidate is a known incompatiblity. Don't use it. + if id(candidate) in incompatible_ids: + return None + return candidate + + def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]: + result = self._finder.find_best_candidate( + project_name=name, + specifier=specifier, + hashes=hashes, + ) + icans = list(result.iter_applicable()) + + # PEP 592: Yanked releases must be ignored unless only yanked + # releases can satisfy the version range. So if this is false, + # all yanked icans need to be skipped. + all_yanked = all(ican.link.is_yanked for ican in icans) + + # PackageFinder returns earlier versions first, so we reverse. + for ican in reversed(icans): + if not all_yanked and ican.link.is_yanked: + continue + func = functools.partial( + self._make_candidate_from_link, + link=ican.link, + extras=extras, + template=template, + name=name, + version=ican.version, + ) + yield ican.version, func + + return FoundCandidates( + iter_index_candidate_infos, + _get_installed_candidate(), + prefers_installed, + incompatible_ids, + ) + + def _iter_explicit_candidates_from_base( + self, + base_requirements: Iterable[Requirement], + extras: FrozenSet[str], + ) -> Iterator[Candidate]: + """Produce explicit candidates from the base given an extra-ed package. + + :param base_requirements: Requirements known to the resolver. The + requirements are guaranteed to not have extras. + :param extras: The extras to inject into the explicit requirements' + candidates. + """ + for req in base_requirements: + lookup_cand, _ = req.get_candidate_lookup() + if lookup_cand is None: # Not explicit. + continue + # We've stripped extras from the identifier, and should always + # get a BaseCandidate here, unless there's a bug elsewhere. 
+ base_cand = as_base_candidate(lookup_cand) + assert base_cand is not None, "no extras here" + yield self._make_extras_candidate(base_cand, extras) + + def _iter_candidates_from_constraints( + self, + identifier: str, + constraint: Constraint, + template: InstallRequirement, + ) -> Iterator[Candidate]: + """Produce explicit candidates from constraints. + + This creates "fake" InstallRequirement objects that are basically clones + of what "should" be the template, but with original_link set to link. + """ + for link in constraint.links: + self._fail_if_link_is_unsupported_wheel(link) + candidate = self._make_candidate_from_link( + link, + extras=frozenset(), + template=install_req_from_link_and_ireq(link, template), + name=canonicalize_name(identifier), + version=None, + ) + if candidate: + yield candidate + + def find_candidates( + self, + identifier: str, + requirements: Mapping[str, Iterable[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + constraint: Constraint, + prefers_installed: bool, + ) -> Iterable[Candidate]: + # Collect basic lookup information from the requirements. + explicit_candidates: Set[Candidate] = set() + ireqs: List[InstallRequirement] = [] + for req in requirements[identifier]: + cand, ireq = req.get_candidate_lookup() + if cand is not None: + explicit_candidates.add(cand) + if ireq is not None: + ireqs.append(ireq) + + # If the current identifier contains extras, add explicit candidates + # from entries from extra-less identifier. + with contextlib.suppress(InvalidRequirement): + parsed_requirement = get_requirement(identifier) + explicit_candidates.update( + self._iter_explicit_candidates_from_base( + requirements.get(parsed_requirement.name, ()), + frozenset(parsed_requirement.extras), + ), + ) + + # Add explicit candidates from constraints. We only do this if there are + # kown ireqs, which represent requirements not already explicit. If + # there are no ireqs, we're constraining already-explicit requirements, + # which is handled later when we return the explicit candidates. + if ireqs: + try: + explicit_candidates.update( + self._iter_candidates_from_constraints( + identifier, + constraint, + template=ireqs[0], + ), + ) + except UnsupportedWheel: + # If we're constrained to install a wheel incompatible with the + # target architecture, no candidates will ever be valid. + return () + + # Since we cache all the candidates, incompatibility identification + # can be made quicker by comparing only the id() values. + incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())} + + # If none of the requirements want an explicit candidate, we can ask + # the finder for candidates. 
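find_candidates above re-parses an extras identifier such as "foo[bar]" to recover the extra-less project name. The standalone packaging library performs the same parse; shown here purely as an illustration:

from packaging.requirements import Requirement

req = Requirement("foo[bar,baz]")
assert req.name == "foo"
assert req.extras == {"bar", "baz"}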
+ if not explicit_candidates: + return self._iter_found_candidates( + ireqs, + constraint.specifier, + constraint.hashes, + prefers_installed, + incompat_ids, + ) + + return ( + c + for c in explicit_candidates + if id(c) not in incompat_ids + and constraint.is_satisfied_by(c) + and all(req.is_satisfied_by(c) for req in requirements[identifier]) + ) + + def _make_requirement_from_install_req( + self, ireq: InstallRequirement, requested_extras: Iterable[str] + ) -> Optional[Requirement]: + if not ireq.match_markers(requested_extras): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + ireq.name, + ireq.markers, + ) + return None + if not ireq.link: + return SpecifierRequirement(ireq) + self._fail_if_link_is_unsupported_wheel(ireq.link) + cand = self._make_candidate_from_link( + ireq.link, + extras=frozenset(ireq.extras), + template=ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + if cand is None: + # There's no way we can satisfy a URL requirement if the underlying + # candidate fails to build. An unnamed URL must be user-supplied, so + # we fail eagerly. If the URL is named, an unsatisfiable requirement + # can make the resolver do the right thing, either backtrack (and + # maybe find some other requirement that's buildable) or raise a + # ResolutionImpossible eventually. + if not ireq.name: + raise self._build_failures[ireq.link] + return UnsatisfiableRequirement(canonicalize_name(ireq.name)) + return self.make_requirement_from_candidate(cand) + + def collect_root_requirements( + self, root_ireqs: List[InstallRequirement] + ) -> CollectedRootRequirements: + collected = CollectedRootRequirements([], {}, {}) + for i, ireq in enumerate(root_ireqs): + if ireq.constraint: + # Ensure we only accept valid constraints + problem = check_invalid_constraint_type(ireq) + if problem: + raise InstallationError(problem) + if not ireq.match_markers(): + continue + assert ireq.name, "Constraint must be named" + name = canonicalize_name(ireq.name) + if name in collected.constraints: + collected.constraints[name] &= ireq + else: + collected.constraints[name] = Constraint.from_ireq(ireq) + else: + req = self._make_requirement_from_install_req( + ireq, + requested_extras=(), + ) + if req is None: + continue + if ireq.user_supplied and req.name not in collected.user_requested: + collected.user_requested[req.name] = i + collected.requirements.append(req) + return collected + + def make_requirement_from_candidate( + self, candidate: Candidate + ) -> ExplicitRequirement: + return ExplicitRequirement(candidate) + + def make_requirement_from_spec( + self, + specifier: str, + comes_from: Optional[InstallRequirement], + requested_extras: Iterable[str] = (), + ) -> Optional[Requirement]: + ireq = self._make_install_req_from_spec(specifier, comes_from) + return self._make_requirement_from_install_req(ireq, requested_extras) + + def make_requires_python_requirement( + self, + specifier: SpecifierSet, + ) -> Optional[Requirement]: + if self._ignore_requires_python: + return None + # Don't bother creating a dependency for an empty Requires-Python. + if not str(specifier): + return None + return RequiresPythonRequirement(specifier, self._python_candidate) + + def get_wheel_cache_entry( + self, link: Link, name: Optional[str] + ) -> Optional[CacheEntry]: + """Look up the link in the wheel cache. 
+ + If ``preparer.require_hashes`` is True, don't use the wheel cache, + because cached wheels, always built locally, have different hashes + than the files downloaded from the index server and thus throw false + hash mismatches. Furthermore, cached wheels at present have + nondeterministic contents due to file modification times. + """ + if self._wheel_cache is None or self.preparer.require_hashes: + return None + return self._wheel_cache.get_cache_entry( + link=link, + package_name=name, + supported_tags=get_supported(), + ) + + def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]: + # TODO: Are there more cases this needs to return True? Editable? + dist = self._installed_dists.get(candidate.project_name) + if dist is None: # Not installed, no uninstallation required. + return None + + # We're installing into global site. The current installation must + # be uninstalled, no matter it's in global or user site, because the + # user site installation has precedence over global. + if not self._use_user_site: + return dist + + # We're installing into user site. Remove the user site installation. + if dist.in_usersite: + return dist + + # We're installing into user site, but the installed incompatible + # package is in global site. We can't uninstall that, and would let + # the new user installation to "shadow" it. But shadowing won't work + # in virtual environments, so we error out. + if running_under_virtualenv() and dist.in_site_packages: + message = ( + f"Will not install to the user site because it will lack " + f"sys.path precedence to {dist.raw_name} in {dist.location}" + ) + raise InstallationError(message) + return None + + def _report_requires_python_error( + self, causes: Sequence["ConflictCause"] + ) -> UnsupportedPythonVersion: + assert causes, "Requires-Python error reported with no cause" + + version = self._python_candidate.version + + if len(causes) == 1: + specifier = str(causes[0].requirement.specifier) + message = ( + f"Package {causes[0].parent.name!r} requires a different " + f"Python: {version} not in {specifier!r}" + ) + return UnsupportedPythonVersion(message) + + message = f"Packages require a different Python. {version} not in:" + for cause in causes: + package = cause.parent.format_for_error() + specifier = str(cause.requirement.specifier) + message += f"\n{specifier!r} (required by {package})" + return UnsupportedPythonVersion(message) + + def _report_single_requirement_conflict( + self, req: Requirement, parent: Optional[Candidate] + ) -> DistributionNotFound: + if parent is None: + req_disp = str(req) + else: + req_disp = f"{req} (from {parent.name})" + + cands = self._finder.find_all_candidates(req.project_name) + versions = [str(v) for v in sorted({c.version for c in cands})] + + logger.critical( + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", + req_disp, + ", ".join(versions) or "none", + ) + if str(req) == "requirements.txt": + logger.info( + "HINT: You are attempting to install a package literally " + 'named "requirements.txt" (which cannot exist). 
Consider ' + "using the '-r' flag to install the packages listed in " + "requirements.txt" + ) + + return DistributionNotFound(f"No matching distribution found for {req}") + + def get_installation_error( + self, + e: "ResolutionImpossible[Requirement, Candidate]", + constraints: Dict[str, Constraint], + ) -> InstallationError: + + assert e.causes, "Installation error reported with no cause" + + # If one of the things we can't solve is "we need Python X.Y", + # that is what we report. + requires_python_causes = [ + cause + for cause in e.causes + if isinstance(cause.requirement, RequiresPythonRequirement) + and not cause.requirement.is_satisfied_by(self._python_candidate) + ] + if requires_python_causes: + # The comprehension above makes sure all Requirement instances are + # RequiresPythonRequirement, so let's cast for convinience. + return self._report_requires_python_error( + cast("Sequence[ConflictCause]", requires_python_causes), + ) + + # Otherwise, we have a set of causes which can't all be satisfied + # at once. + + # The simplest case is when we have *one* cause that can't be + # satisfied. We just report that case. + if len(e.causes) == 1: + req, parent = e.causes[0] + if req.name not in constraints: + return self._report_single_requirement_conflict(req, parent) + + # OK, we now have a list of requirements that can't all be + # satisfied at once. + + # A couple of formatting helpers + def text_join(parts: List[str]) -> str: + if len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def describe_trigger(parent: Candidate) -> str: + ireq = parent.get_install_requirement() + if not ireq or not ireq.comes_from: + return f"{parent.name}=={parent.version}" + if isinstance(ireq.comes_from, InstallRequirement): + return str(ireq.comes_from.name) + return str(ireq.comes_from) + + triggers = set() + for req, parent in e.causes: + if parent is None: + # This is a root requirement, so we can report it directly + trigger = req.format_for_error() + else: + trigger = describe_trigger(parent) + triggers.add(trigger) + + if triggers: + info = text_join(sorted(triggers)) + else: + info = "the requested packages" + + msg = ( + "Cannot install {} because these package versions " + "have conflicting dependencies.".format(info) + ) + logger.critical(msg) + msg = "\nThe conflict is caused by:" + + relevant_constraints = set() + for req, parent in e.causes: + if req.name in constraints: + relevant_constraints.add(req.name) + msg = msg + "\n " + if parent: + msg = msg + f"{parent.name} {parent.version} depends on " + else: + msg = msg + "The user requested " + msg = msg + req.format_for_error() + for key in relevant_constraints: + spec = constraints[key].specifier + msg += f"\n The user requested (constraint) {key}{spec}" + + msg = ( + msg + + "\n\n" + + "To fix this you could try to:\n" + + "1. loosen the range of package versions you've specified\n" + + "2. 
remove package versions to allow pip attempt to solve " + + "the dependency conflict\n" + ) + + logger.info(msg) + + return DistributionNotFound( + "ResolutionImpossible: for help visit " + "https://pip.pypa.io/en/latest/user_guide/" + "#fixing-conflicting-dependencies" + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py new file mode 100644 index 0000000..8663097 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py @@ -0,0 +1,155 @@ +"""Utilities to lazily create and visit candidates found. + +Creating and visiting a candidate is a *very* costly operation. It involves +fetching, extracting, potentially building modules from source, and verifying +distribution metadata. It is therefore crucial for performance to keep +everything here lazy all the way down, so we only touch candidates that we +absolutely need, and not "download the world" when we only need one version of +something. +""" + +import functools +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple + +from pip._vendor.packaging.version import _BaseVersion + +from .base import Candidate + +IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]] + +if TYPE_CHECKING: + SequenceCandidate = Sequence[Candidate] +else: + # For compatibility: Python before 3.9 does not support using [] on the + # Sequence class. + # + # >>> from collections.abc import Sequence + # >>> Sequence[str] + # Traceback (most recent call last): + # File "", line 1, in + # TypeError: 'ABCMeta' object is not subscriptable + # + # TODO: Remove this block after dropping Python 3.8 support. + SequenceCandidate = Sequence + + +def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]: + """Iterator for ``FoundCandidates``. + + This iterator is used when the package is not already installed. Candidates + from index come later in their normal ordering. + """ + versions_found: Set[_BaseVersion] = set() + for version, func in infos: + if version in versions_found: + continue + candidate = func() + if candidate is None: + continue + yield candidate + versions_found.add(version) + + +def _iter_built_with_prepended( + installed: Candidate, infos: Iterator[IndexCandidateInfo] +) -> Iterator[Candidate]: + """Iterator for ``FoundCandidates``. + + This iterator is used when the resolver prefers the already-installed + candidate and NOT to upgrade. The installed candidate is therefore + always yielded first, and candidates from index come later in their + normal ordering, except skipped when the version is already installed. + """ + yield installed + versions_found: Set[_BaseVersion] = {installed.version} + for version, func in infos: + if version in versions_found: + continue + candidate = func() + if candidate is None: + continue + yield candidate + versions_found.add(version) + + +def _iter_built_with_inserted( + installed: Candidate, infos: Iterator[IndexCandidateInfo] +) -> Iterator[Candidate]: + """Iterator for ``FoundCandidates``. + + This iterator is used when the resolver prefers to upgrade an + already-installed package. Candidates from index are returned in their + normal ordering, except replaced when the version is already installed. 
+ + The implementation iterates through and yields other candidates, inserting + the installed candidate exactly once before we start yielding older or + equivalent candidates, or after all other candidates if they are all newer. + """ + versions_found: Set[_BaseVersion] = set() + for version, func in infos: + if version in versions_found: + continue + # If the installed candidate is better, yield it first. + if installed.version >= version: + yield installed + versions_found.add(installed.version) + candidate = func() + if candidate is None: + continue + yield candidate + versions_found.add(version) + + # If the installed candidate is older than all other candidates. + if installed.version not in versions_found: + yield installed + + +class FoundCandidates(SequenceCandidate): + """A lazy sequence to provide candidates to the resolver. + + The intended usage is to return this from `find_matches()` so the resolver + can iterate through the sequence multiple times, but only access the index + page when remote packages are actually needed. This improve performances + when suitable candidates are already installed on disk. + """ + + def __init__( + self, + get_infos: Callable[[], Iterator[IndexCandidateInfo]], + installed: Optional[Candidate], + prefers_installed: bool, + incompatible_ids: Set[int], + ): + self._get_infos = get_infos + self._installed = installed + self._prefers_installed = prefers_installed + self._incompatible_ids = incompatible_ids + + def __getitem__(self, index: Any) -> Any: + # Implemented to satisfy the ABC check. This is not needed by the + # resolver, and should not be used by the provider either (for + # performance reasons). + raise NotImplementedError("don't do this") + + def __iter__(self) -> Iterator[Candidate]: + infos = self._get_infos() + if not self._installed: + iterator = _iter_built(infos) + elif self._prefers_installed: + iterator = _iter_built_with_prepended(self._installed, infos) + else: + iterator = _iter_built_with_inserted(self._installed, infos) + return (c for c in iterator if id(c) not in self._incompatible_ids) + + def __len__(self) -> int: + # Implemented to satisfy the ABC check. This is not needed by the + # resolver, and should not be used by the provider either (for + # performance reasons). 
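A minimal sketch of the laziness this module is organised around: the versions are known cheaply up front, but the callable that builds a candidate only runs for entries the resolver actually reaches. The print stands in for the expensive fetch/build/verify step, and everything here is invented for the example:

from typing import Callable, Iterator, Tuple

def make(version: str) -> str:
    print(f"preparing {version}")  # expensive work happens only on demand
    return version

infos: Tuple[Tuple[str, Callable[[], str]], ...] = (
    ("2.0", lambda: make("2.0")),
    ("1.5", lambda: make("1.5")),
)

lazy: Iterator[str] = (build() for _, build in infos)
first = next(lazy)  # only "preparing 2.0" is printed; "1.5" is never built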
+ raise NotImplementedError("don't do this") + + @functools.lru_cache(maxsize=1) + def __bool__(self) -> bool: + if self._prefers_installed and self._installed: + return True + return any(self) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/provider.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/provider.py new file mode 100644 index 0000000..85d3b31 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/provider.py @@ -0,0 +1,215 @@ +import collections +import math +from typing import TYPE_CHECKING, Dict, Iterable, Iterator, Mapping, Sequence, Union + +from pip._vendor.resolvelib.providers import AbstractProvider + +from .base import Candidate, Constraint, Requirement +from .candidates import REQUIRES_PYTHON_IDENTIFIER +from .factory import Factory + +if TYPE_CHECKING: + from pip._vendor.resolvelib.providers import Preference + from pip._vendor.resolvelib.resolvers import RequirementInformation + + PreferenceInformation = RequirementInformation[Requirement, Candidate] + + _ProviderBase = AbstractProvider[Requirement, Candidate, str] +else: + _ProviderBase = AbstractProvider + +# Notes on the relationship between the provider, the factory, and the +# candidate and requirement classes. +# +# The provider is a direct implementation of the resolvelib class. Its role +# is to deliver the API that resolvelib expects. +# +# Rather than work with completely abstract "requirement" and "candidate" +# concepts as resolvelib does, pip has concrete classes implementing these two +# ideas. The API of Requirement and Candidate objects are defined in the base +# classes, but essentially map fairly directly to the equivalent provider +# methods. In particular, `find_matches` and `is_satisfied_by` are +# requirement methods, and `get_dependencies` is a candidate method. +# +# The factory is the interface to pip's internal mechanisms. It is stateless, +# and is created by the resolver and held as a property of the provider. It is +# responsible for creating Requirement and Candidate objects, and provides +# services to those objects (access to pip's finder and preparer). + + +class PipProvider(_ProviderBase): + """Pip's provider implementation for resolvelib. + + :params constraints: A mapping of constraints specified by the user. Keys + are canonicalized project names. + :params ignore_dependencies: Whether the user specified ``--no-deps``. + :params upgrade_strategy: The user-specified upgrade strategy. + :params user_requested: A set of canonicalized package names that the user + supplied for pip to install/upgrade. 
+ """ + + def __init__( + self, + factory: Factory, + constraints: Dict[str, Constraint], + ignore_dependencies: bool, + upgrade_strategy: str, + user_requested: Dict[str, int], + ) -> None: + self._factory = factory + self._constraints = constraints + self._ignore_dependencies = ignore_dependencies + self._upgrade_strategy = upgrade_strategy + self._user_requested = user_requested + self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf) + + def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str: + return requirement_or_candidate.name + + def get_preference( # type: ignore + self, + identifier: str, + resolutions: Mapping[str, Candidate], + candidates: Mapping[str, Iterator[Candidate]], + information: Mapping[str, Iterable["PreferenceInformation"]], + backtrack_causes: Sequence["PreferenceInformation"], + ) -> "Preference": + """Produce a sort key for given requirement based on preference. + + The lower the return value is, the more preferred this group of + arguments is. + + Currently pip considers the followings in order: + + * Prefer if any of the known requirements is "direct", e.g. points to an + explicit URL. + * If equal, prefer if any requirement is "pinned", i.e. contains + operator ``===`` or ``==``. + * If equal, calculate an approximate "depth" and resolve requirements + closer to the user-specified requirements first. + * Order user-specified requirements by the order they are specified. + * If equal, prefers "non-free" requirements, i.e. contains at least one + operator, such as ``>=`` or ``<``. + * If equal, order alphabetically for consistency (helps debuggability). + """ + lookups = (r.get_candidate_lookup() for r, _ in information[identifier]) + candidate, ireqs = zip(*lookups) + operators = [ + specifier.operator + for specifier_set in (ireq.specifier for ireq in ireqs if ireq) + for specifier in specifier_set + ] + + direct = candidate is not None + pinned = any(op[:2] == "==" for op in operators) + unfree = bool(operators) + + try: + requested_order: Union[int, float] = self._user_requested[identifier] + except KeyError: + requested_order = math.inf + parent_depths = ( + self._known_depths[parent.name] if parent is not None else 0.0 + for _, parent in information[identifier] + ) + inferred_depth = min(d for d in parent_depths) + 1.0 + else: + inferred_depth = 1.0 + self._known_depths[identifier] = inferred_depth + + requested_order = self._user_requested.get(identifier, math.inf) + + # Requires-Python has only one candidate and the check is basically + # free, so we always do it first to avoid needless work if it fails. + requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER + + # HACK: Setuptools have a very long and solid backward compatibility + # track record, and extremely few projects would request a narrow, + # non-recent version range of it since that would break a lot things. + # (Most projects specify it only to request for an installer feature, + # which does not work, but that's another topic.) Intentionally + # delaying Setuptools helps reduce branches the resolver has to check. + # This serves as a temporary fix for issues like "apache-airlfow[all]" + # while we work on "proper" branch pruning techniques. 
+ delay_this = identifier == "setuptools" + + # Prefer the causes of backtracking on the assumption that the problem + # resolving the dependency tree is related to the failures that caused + # the backtracking + backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes) + + return ( + not requires_python, + delay_this, + not direct, + not pinned, + not backtrack_cause, + inferred_depth, + requested_order, + not unfree, + identifier, + ) + + def _get_constraint(self, identifier: str) -> Constraint: + if identifier in self._constraints: + return self._constraints[identifier] + + # HACK: Theoratically we should check whether this identifier is a valid + # "NAME[EXTRAS]" format, and parse out the name part with packaging or + # some regular expression. But since pip's resolver only spits out + # three kinds of identifiers: normalized PEP 503 names, normalized names + # plus extras, and Requires-Python, we can cheat a bit here. + name, open_bracket, _ = identifier.partition("[") + if open_bracket and name in self._constraints: + return self._constraints[name] + + return Constraint.empty() + + def find_matches( + self, + identifier: str, + requirements: Mapping[str, Iterator[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + ) -> Iterable[Candidate]: + def _eligible_for_upgrade(name: str) -> bool: + """Are upgrades allowed for this project? + + This checks the upgrade strategy, and whether the project was one + that the user specified in the command line, in order to decide + whether we should upgrade if there's a newer version available. + + (Note that we don't need access to the `--upgrade` flag, because + an upgrade strategy of "to-satisfy-only" means that `--upgrade` + was not specified). + """ + if self._upgrade_strategy == "eager": + return True + elif self._upgrade_strategy == "only-if-needed": + return name in self._user_requested + return False + + return self._factory.find_candidates( + identifier=identifier, + requirements=requirements, + constraint=self._get_constraint(identifier), + prefers_installed=(not _eligible_for_upgrade(identifier)), + incompatibilities=incompatibilities, + ) + + def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool: + return requirement.is_satisfied_by(candidate) + + def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]: + with_requires = not self._ignore_dependencies + return [r for r in candidate.iter_dependencies(with_requires) if r is not None] + + @staticmethod + def is_backtrack_cause( + identifier: str, backtrack_causes: Sequence["PreferenceInformation"] + ) -> bool: + for backtrack_cause in backtrack_causes: + if identifier == backtrack_cause.requirement.name: + return True + if backtrack_cause.parent and identifier == backtrack_cause.parent.name: + return True + return False diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/reporter.py new file mode 100644 index 0000000..6ced532 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/reporter.py @@ -0,0 +1,68 @@ +from collections import defaultdict +from logging import getLogger +from typing import Any, DefaultDict + +from pip._vendor.resolvelib.reporters import BaseReporter + +from .base import Candidate, Requirement + +logger = getLogger(__name__) + + +class PipReporter(BaseReporter): + def __init__(self) -> None: + self.backtracks_by_package: DefaultDict[str, int] = 
defaultdict(int) + + self._messages_at_backtrack = { + 1: ( + "pip is looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 8: ( + "pip is looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 13: ( + "This is taking longer than usual. You might need to provide " + "the dependency resolver with stricter constraints to reduce " + "runtime. See https://pip.pypa.io/warnings/backtracking for " + "guidance. If you want to abort this run, press Ctrl + C." + ), + } + + def backtracking(self, candidate: Candidate) -> None: + self.backtracks_by_package[candidate.name] += 1 + + count = self.backtracks_by_package[candidate.name] + if count not in self._messages_at_backtrack: + return + + message = self._messages_at_backtrack[count] + logger.info("INFO: %s", message.format(package_name=candidate.name)) + + +class PipDebuggingReporter(BaseReporter): + """A reporter that does an info log for every event it sees.""" + + def starting(self) -> None: + logger.info("Reporter.starting()") + + def starting_round(self, index: int) -> None: + logger.info("Reporter.starting_round(%r)", index) + + def ending_round(self, index: int, state: Any) -> None: + logger.info("Reporter.ending_round(%r, state)", index) + + def ending(self, state: Any) -> None: + logger.info("Reporter.ending(%r)", state) + + def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None: + logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent) + + def backtracking(self, candidate: Candidate) -> None: + logger.info("Reporter.backtracking(%r)", candidate) + + def pinning(self, candidate: Candidate) -> None: + logger.info("Reporter.pinning(%r)", candidate) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/requirements.py new file mode 100644 index 0000000..c19f83c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/requirements.py @@ -0,0 +1,166 @@ +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pip._internal.req.req_install import InstallRequirement + +from .base import Candidate, CandidateLookup, Requirement, format_name + + +class ExplicitRequirement(Requirement): + def __init__(self, candidate: Candidate) -> None: + self.candidate = candidate + + def __str__(self) -> str: + return str(self.candidate) + + def __repr__(self) -> str: + return "{class_name}({candidate!r})".format( + class_name=self.__class__.__name__, + candidate=self.candidate, + ) + + @property + def project_name(self) -> NormalizedName: + # No need to canonicalise - the candidate did this + return self.candidate.project_name + + @property + def name(self) -> str: + # No need to canonicalise - the candidate did this + return self.candidate.name + + def format_for_error(self) -> str: + return self.candidate.format_for_error() + + def get_candidate_lookup(self) -> CandidateLookup: + return self.candidate, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return candidate == self.candidate + + +class SpecifierRequirement(Requirement): + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = ireq + 
self._extras = frozenset(ireq.extras) + + def __str__(self) -> str: + return str(self._ireq.req) + + def __repr__(self) -> str: + return "{class_name}({requirement!r})".format( + class_name=self.__class__.__name__, + requirement=str(self._ireq.req), + ) + + @property + def project_name(self) -> NormalizedName: + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + return canonicalize_name(self._ireq.req.name) + + @property + def name(self) -> str: + return format_name(self.project_name, self._extras) + + def format_for_error(self) -> str: + + # Convert comma-separated specifiers into "A, B, ..., F and G" + # This makes the specifier a bit more "human readable", without + # risking a change in meaning. (Hopefully! Not all edge cases have + # been checked) + parts = [s.strip() for s in str(self).split(",")] + if len(parts) == 0: + return "" + elif len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def get_candidate_lookup(self) -> CandidateLookup: + return None, self._ireq + + def is_satisfied_by(self, candidate: Candidate) -> bool: + assert candidate.name == self.name, ( + f"Internal issue: Candidate is not for this requirement " + f"{candidate.name} vs {self.name}" + ) + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + spec = self._ireq.req.specifier + return spec.contains(candidate.version, prereleases=True) + + +class RequiresPythonRequirement(Requirement): + """A requirement representing Requires-Python metadata.""" + + def __init__(self, specifier: SpecifierSet, match: Candidate) -> None: + self.specifier = specifier + self._candidate = match + + def __str__(self) -> str: + return f"Python {self.specifier}" + + def __repr__(self) -> str: + return "{class_name}({specifier!r})".format( + class_name=self.__class__.__name__, + specifier=str(self.specifier), + ) + + @property + def project_name(self) -> NormalizedName: + return self._candidate.project_name + + @property + def name(self) -> str: + return self._candidate.name + + def format_for_error(self) -> str: + return str(self) + + def get_candidate_lookup(self) -> CandidateLookup: + if self.specifier.contains(self._candidate.version, prereleases=True): + return self._candidate, None + return None, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + assert candidate.name == self._candidate.name, "Not Python candidate" + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ return self.specifier.contains(candidate.version, prereleases=True) + + +class UnsatisfiableRequirement(Requirement): + """A requirement that cannot be satisfied.""" + + def __init__(self, name: NormalizedName) -> None: + self._name = name + + def __str__(self) -> str: + return f"{self._name} (unavailable)" + + def __repr__(self) -> str: + return "{class_name}({name!r})".format( + class_name=self.__class__.__name__, + name=str(self._name), + ) + + @property + def project_name(self) -> NormalizedName: + return self._name + + @property + def name(self) -> str: + return self._name + + def format_for_error(self) -> str: + return str(self) + + def get_candidate_lookup(self) -> CandidateLookup: + return None, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return False diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/resolver.py new file mode 100644 index 0000000..12f9670 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/resolver.py @@ -0,0 +1,251 @@ +import functools +import logging +import os +from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible +from pip._vendor.resolvelib import Resolver as RLResolver +from pip._vendor.resolvelib.structs import DirectedGraph + +from pip._internal.cache import WheelCache +from pip._internal.index.package_finder import PackageFinder +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.resolution.resolvelib.provider import PipProvider +from pip._internal.resolution.resolvelib.reporter import ( + PipDebuggingReporter, + PipReporter, +) + +from .base import Candidate, Requirement +from .factory import Factory + +if TYPE_CHECKING: + from pip._vendor.resolvelib.resolvers import Result as RLResult + + Result = RLResult[Requirement, Candidate, str] + + +logger = logging.getLogger(__name__) + + +class Resolver(BaseResolver): + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: Optional[WheelCache], + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + py_version_info: Optional[Tuple[int, ...]] = None, + ): + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + self.factory = Factory( + finder=finder, + preparer=preparer, + make_install_req=make_install_req, + wheel_cache=wheel_cache, + use_user_site=use_user_site, + force_reinstall=force_reinstall, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + py_version_info=py_version_info, + ) + self.ignore_dependencies = ignore_dependencies + self.upgrade_strategy = upgrade_strategy + self._result: Optional[Result] = None + + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + collected = self.factory.collect_root_requirements(root_reqs) + provider = PipProvider( + 
factory=self.factory, + constraints=collected.constraints, + ignore_dependencies=self.ignore_dependencies, + upgrade_strategy=self.upgrade_strategy, + user_requested=collected.user_requested, + ) + if "PIP_RESOLVER_DEBUG" in os.environ: + reporter: BaseReporter = PipDebuggingReporter() + else: + reporter = PipReporter() + resolver: RLResolver[Requirement, Candidate, str] = RLResolver( + provider, + reporter, + ) + + try: + try_to_avoid_resolution_too_deep = 2000000 + result = self._result = resolver.resolve( + collected.requirements, max_rounds=try_to_avoid_resolution_too_deep + ) + + except ResolutionImpossible as e: + error = self.factory.get_installation_error( + cast("ResolutionImpossible[Requirement, Candidate]", e), + collected.constraints, + ) + raise error from e + + req_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for candidate in result.mapping.values(): + ireq = candidate.get_install_requirement() + if ireq is None: + continue + + # Check if there is already an installation under the same name, + # and set a flag for later stages to uninstall it, if needed. + installed_dist = self.factory.get_dist_to_uninstall(candidate) + if installed_dist is None: + # There is no existing installation -- nothing to uninstall. + ireq.should_reinstall = False + elif self.factory.force_reinstall: + # The --force-reinstall flag is set -- reinstall. + ireq.should_reinstall = True + elif installed_dist.version != candidate.version: + # The installation is different in version -- reinstall. + ireq.should_reinstall = True + elif candidate.is_editable or installed_dist.editable: + # The incoming distribution is editable, or different in + # editable-ness to installation -- reinstall. + ireq.should_reinstall = True + elif candidate.source_link and candidate.source_link.is_file: + # The incoming distribution is under file:// + if candidate.source_link.is_wheel: + # is a local wheel -- do nothing. + logger.info( + "%s is already installed with the same version as the " + "provided wheel. Use --force-reinstall to force an " + "installation of the wheel.", + ireq.name, + ) + continue + + # is a local sdist or path -- reinstall + ireq.should_reinstall = True + else: + continue + + link = candidate.source_link + if link and link.is_yanked: + # The reason can contain non-ASCII characters, Unicode + # is required for Python 2. + msg = ( + "The candidate selected for download or install is a " + "yanked version: {name!r} candidate (version {version} " + "at {link})\nReason for being yanked: {reason}" + ).format( + name=candidate.name, + version=candidate.version, + link=link, + reason=link.yanked_reason or "", + ) + logger.warning(msg) + + req_set.add_named_requirement(ireq) + + reqs = req_set.all_requirements + self.factory.preparer.prepare_linked_requirements_more(reqs) + return req_set + + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: + """Get order for installation of requirements in RequirementSet. + + The returned list contains a requirement before another that depends on + it. This helps ensure that the environment is kept consistent as they + get installed one-by-one. + + The current implementation creates a topological ordering of the + dependency graph, while breaking any cycles in the graph at arbitrary + points. We make no guarantees about where the cycle would be broken, + other than they would be broken. 
+ """ + assert self._result is not None, "must call resolve() first" + + graph = self._result.graph + weights = get_topological_weights( + graph, + expected_node_count=len(self._result.mapping) + 1, + ) + + sorted_items = sorted( + req_set.requirements.items(), + key=functools.partial(_req_set_item_sorter, weights=weights), + reverse=True, + ) + return [ireq for _, ireq in sorted_items] + + +def get_topological_weights( + graph: "DirectedGraph[Optional[str]]", expected_node_count: int +) -> Dict[Optional[str], int]: + """Assign weights to each node based on how "deep" they are. + + This implementation may change at any point in the future without prior + notice. + + We take the length for the longest path to any node from root, ignoring any + paths that contain a single node twice (i.e. cycles). This is done through + a depth-first search through the graph, while keeping track of the path to + the node. + + Cycles in the graph result would result in node being revisited while also + being it's own path. In this case, take no action. This helps ensure we + don't get stuck in a cycle. + + When assigning weight, the longer path (i.e. larger length) is preferred. + """ + path: Set[Optional[str]] = set() + weights: Dict[Optional[str], int] = {} + + def visit(node: Optional[str]) -> None: + if node in path: + # We hit a cycle, so we'll break it here. + return + + # Time to visit the children! + path.add(node) + for child in graph.iter_children(node): + visit(child) + path.remove(node) + + last_known_parent_count = weights.get(node, 0) + weights[node] = max(last_known_parent_count, len(path)) + + # `None` is guaranteed to be the root node by resolvelib. + visit(None) + + # Sanity checks + assert weights[None] == 0 + assert len(weights) == expected_node_count + + return weights + + +def _req_set_item_sorter( + item: Tuple[str, InstallRequirement], + weights: Dict[Optional[str], int], +) -> Tuple[int, str]: + """Key function used to sort install requirements for installation. + + Based on the "weight" mapping calculated in ``get_installation_order()``. + The canonical package name is returned as the second member as a tie- + breaker to ensure the result is predictable, which is useful in tests. 
+ """ + name = canonicalize_name(item[0]) + return weights[name], name diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/self_outdated_check.py b/.venv/lib/python3.9/site-packages/pip/_internal/self_outdated_check.py new file mode 100644 index 0000000..72f70fc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/self_outdated_check.py @@ -0,0 +1,182 @@ +import datetime +import hashlib +import json +import logging +import optparse +import os.path +import sys +from typing import Any, Dict + +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import get_default_environment +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.session import PipSession +from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace +from pip._internal.utils.misc import ensure_dir + +SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" + + +logger = logging.getLogger(__name__) + + +def _get_statefile_name(key: str) -> str: + key_bytes = key.encode() + name = hashlib.sha224(key_bytes).hexdigest() + return name + + +class SelfCheckState: + def __init__(self, cache_dir: str) -> None: + self.state: Dict[str, Any] = {} + self.statefile_path = None + + # Try to load the existing state + if cache_dir: + self.statefile_path = os.path.join( + cache_dir, "selfcheck", _get_statefile_name(self.key) + ) + try: + with open(self.statefile_path, encoding="utf-8") as statefile: + self.state = json.load(statefile) + except (OSError, ValueError, KeyError): + # Explicitly suppressing exceptions, since we don't want to + # error out if the cache file is invalid. + pass + + @property + def key(self) -> str: + return sys.prefix + + def save(self, pypi_version: str, current_time: datetime.datetime) -> None: + # If we do not have a path to cache in, don't bother saving. + if not self.statefile_path: + return + + # Check to make sure that we own the directory + if not check_path_owner(os.path.dirname(self.statefile_path)): + return + + # Now that we've ensured the directory is owned by this user, we'll go + # ahead and make sure that all our directories are created. + ensure_dir(os.path.dirname(self.statefile_path)) + + state = { + # Include the key so it's easy to tell which pip wrote the + # file. + "key": self.key, + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + } + + text = json.dumps(state, sort_keys=True, separators=(",", ":")) + + with adjacent_tmp_file(self.statefile_path) as f: + f.write(text.encode()) + + try: + # Since we have a prefix-specific state file, we can just + # overwrite whatever is there, no need to check. + replace(f.name, self.statefile_path) + except OSError: + # Best effort. + pass + + +def was_installed_by_pip(pkg: str) -> bool: + """Checks whether pkg was installed by pip + + This is used not to display the upgrade message when pip is in fact + installed by system package manager, such as dnf on Fedora. + """ + dist = get_default_environment().get_distribution(pkg) + return dist is not None and "pip" == dist.installer + + +def pip_self_version_check(session: PipSession, options: optparse.Values) -> None: + """Check for an update for pip. + + Limit the frequency of checks to once per week. State is stored either in + the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix + of the pip script path. 
+ """ + installed_dist = get_default_environment().get_distribution("pip") + if not installed_dist: + return + + pip_version = installed_dist.version + pypi_version = None + + try: + state = SelfCheckState(cache_dir=options.cache_dir) + + current_time = datetime.datetime.utcnow() + # Determine if we need to refresh the state + if "last_check" in state.state and "pypi_version" in state.state: + last_check = datetime.datetime.strptime( + state.state["last_check"], SELFCHECK_DATE_FMT + ) + if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: + pypi_version = state.state["pypi_version"] + + # Refresh the version if we need to or just see if we need to warn + if pypi_version is None: + # Lets use PackageFinder to see what the latest pip version is + link_collector = LinkCollector.create( + session, + options=options, + suppress_no_index=True, + ) + + # Pass allow_yanked=False so we don't suggest upgrading to a + # yanked version. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=False, # Explicitly set to False + ) + + finder = PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + ) + best_candidate = finder.find_best_candidate("pip").best_candidate + if best_candidate is None: + return + pypi_version = str(best_candidate.version) + + # save that we've performed a check + state.save(pypi_version, current_time) + + remote_version = parse_version(pypi_version) + + local_version_is_older = ( + pip_version < remote_version + and pip_version.base_version != remote_version.base_version + and was_installed_by_pip("pip") + ) + + # Determine if our pypi_version is older + if not local_version_is_older: + return + + # We cannot tell how the current pip is available in the current + # command context, so be pragmatic here and suggest the command + # that's always available. This does not accommodate spaces in + # `sys.executable`. 
+ pip_cmd = f"{sys.executable} -m pip" + logger.warning( + "You are using pip version %s; however, version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, + pypi_version, + pip_cmd, + ) + except Exception: + logger.debug( + "There was an error checking the latest version of pip", + exc_info=True, + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e24814a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc new file mode 100644 index 0000000..dafe4e0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc new file mode 100644 index 0000000..22b4941 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc new file mode 100644 index 0000000..6792e5c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc new file mode 100644 index 0000000..65f4654 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc new file mode 100644 index 0000000..68e7155 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc new file mode 100644 index 0000000..491a8ac Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc new file mode 100644 index 0000000..e106398 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc new file mode 100644 index 0000000..e66daed Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc new file mode 100644 index 0000000..2224643 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc new file mode 100644 index 0000000..72b40e2 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc new file mode 100644 index 0000000..54b045d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc new file mode 100644 index 0000000..1d0ccf7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc new file mode 100644 index 0000000..7172c69 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc new file mode 100644 index 0000000..7ecb4f7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc new file mode 100644 index 0000000..350381e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc new file mode 100644 index 0000000..31a0675 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc new file mode 100644 
index 0000000..57c2c37 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc new file mode 100644 index 0000000..27ec6e1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc new file mode 100644 index 0000000..a2af7ba Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc new file mode 100644 index 0000000..7e59e92 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/parallel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/parallel.cpython-39.pyc new file mode 100644 index 0000000..16fc1b2 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/parallel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-39.pyc new file mode 100644 index 0000000..9a8d093 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc new file mode 100644 index 0000000..dc044a1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc new file mode 100644 index 0000000..28dfb9e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc new file mode 100644 index 0000000..b7f141d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc new file mode 100644 index 0000000..96b8722 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc new file mode 100644 index 0000000..e92af48 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc new file mode 100644 index 0000000..5718624 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc new file mode 100644 index 0000000..f39fd9c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/_log.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/_log.py new file mode 100644 index 0000000..92c4c6a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/_log.py @@ -0,0 +1,38 @@ +"""Customize logging + +Defines custom logger class for the `logger.verbose(...)` method. + +init_logging() must be called before any other modules that call logging.getLogger. +""" + +import logging +from typing import Any, cast + +# custom log level for `--verbose` output +# between DEBUG and INFO +VERBOSE = 15 + + +class VerboseLogger(logging.Logger): + """Custom Logger, defining a verbose log-level + + VERBOSE is between INFO and DEBUG. + """ + + def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None: + return self.log(VERBOSE, msg, *args, **kwargs) + + +def getLogger(name: str) -> VerboseLogger: + """logging.getLogger, but ensures our VerboseLogger class is returned""" + return cast(VerboseLogger, logging.getLogger(name)) + + +def init_logging() -> None: + """Register our VerboseLogger and VERBOSE log level. + + Should be called before any calls to getLogger(), + i.e. in pip._internal.__init__ + """ + logging.setLoggerClass(VerboseLogger) + logging.addLevelName(VERBOSE, "VERBOSE") diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/appdirs.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/appdirs.py new file mode 100644 index 0000000..16933bf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/appdirs.py @@ -0,0 +1,52 @@ +""" +This code wraps the vendored appdirs module to so the return values are +compatible for the current pip code base. + +The intention is to rewrite current usages gradually, keeping the tests pass, +and eventually drop this after all usages are changed. +""" + +import os +import sys +from typing import List + +from pip._vendor import platformdirs as _appdirs + + +def user_cache_dir(appname: str) -> str: + return _appdirs.user_cache_dir(appname, appauthor=False) + + +def _macos_user_config_dir(appname: str, roaming: bool = True) -> str: + # Use ~/Application Support/pip, if the directory exists. + path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming) + if os.path.isdir(path): + return path + + # Use a Linux-like ~/.config/pip, by default. 
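The _log module above follows the standard-library pattern for a custom log level. A self-contained sketch of that pattern (the names mirror _log.py, but nothing here is pip-specific):

    import logging

    VERBOSE = 15  # sits between DEBUG (10) and INFO (20)

    class VerboseLogger(logging.Logger):
        def verbose(self, msg, *args, **kwargs):
            return self.log(VERBOSE, msg, *args, **kwargs)

    # Must happen before getLogger() is called for the loggers involved.
    logging.setLoggerClass(VerboseLogger)
    logging.addLevelName(VERBOSE, "VERBOSE")

    logging.basicConfig(level=VERBOSE)
    logging.getLogger("demo").verbose("shown at --verbose style verbosity")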
+ linux_like_path = "~/.config/" + if appname: + linux_like_path = os.path.join(linux_like_path, appname) + + return os.path.expanduser(linux_like_path) + + +def user_config_dir(appname: str, roaming: bool = True) -> str: + if sys.platform == "darwin": + return _macos_user_config_dir(appname, roaming) + + return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) + + +# for the discussion regarding site_config_dir locations +# see +def site_config_dirs(appname: str) -> List[str]: + if sys.platform == "darwin": + return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)] + + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if sys.platform == "win32": + return [dirval] + + # Unix-y system. Look in /etc as well. + return dirval.split(os.pathsep) + ["/etc"] diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/compat.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/compat.py new file mode 100644 index 0000000..3f4d300 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/compat.py @@ -0,0 +1,63 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" + +import logging +import os +import sys + +__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"] + + +logger = logging.getLogger(__name__) + + +def has_tls() -> bool: + try: + import _ssl # noqa: F401 # ignore unused + + return True + except ImportError: + pass + + from pip._vendor.urllib3.util import IS_PYOPENSSL + + return IS_PYOPENSSL + + +def get_path_uid(path: str) -> int: + """ + Return path's uid. + + Does not follow symlinks: + https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in compat due to differences on AIX and + Jython, that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. + """ + if hasattr(os, "O_NOFOLLOW"): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError(f"{path} is a symlink; Will not return uid for symlinks") + return file_uid + + +# packages in the stdlib that may have installation metadata, but should not be +# considered 'installed'. this theoretically could be determined based on +# dist.location (py27:`sysconfig.get_paths()['stdlib']`, +# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may +# make this ineffective, so hard-coding +stdlib_pkgs = {"python", "wsgiref", "argparse"} + + +# windows detection, covers cpython and ironpython +WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt") diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/compatibility_tags.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/compatibility_tags.py new file mode 100644 index 0000000..b6ed9a7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/compatibility_tags.py @@ -0,0 +1,165 @@ +"""Generate and work with PEP 425 Compatibility Tags. 
+""" + +import re +from typing import List, Optional, Tuple + +from pip._vendor.packaging.tags import ( + PythonVersion, + Tag, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + mac_platforms, +) + +_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") + + +def version_info_to_nodot(version_info: Tuple[int, ...]) -> str: + # Only use up to the first two numbers. + return "".join(map(str, version_info[:2])) + + +def _mac_platforms(arch: str) -> List[str]: + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + "{}_{}".format(name, arch[len("macosx_") :]) + for arch in mac_platforms(mac_version, actual_arch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _custom_manylinux_platforms(arch: str) -> List[str]: + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch_prefix == "manylinux2014": + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {"i686", "x86_64"}: + arches.append("manylinux2010" + arch_sep + arch_suffix) + arches.append("manylinux1" + arch_sep + arch_suffix) + elif arch_prefix == "manylinux2010": + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append("manylinux1" + arch_sep + arch_suffix) + return arches + + +def _get_custom_platforms(arch: str) -> List[str]: + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch.startswith("macosx"): + arches = _mac_platforms(arch) + elif arch_prefix in ["manylinux2014", "manylinux2010"]: + arches = _custom_manylinux_platforms(arch) + else: + arches = [arch] + return arches + + +def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]: + if not platforms: + return None + + seen = set() + result = [] + + for p in platforms: + if p in seen: + continue + additions = [c for c in _get_custom_platforms(p) if c not in seen] + seen.update(additions) + result.extend(additions) + + return result + + +def _get_python_version(version: str) -> PythonVersion: + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) + + +def _get_custom_interpreter( + implementation: Optional[str] = None, version: Optional[str] = None +) -> str: + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return f"{implementation}{version}" + + +def get_supported( + version: Optional[str] = None, + platforms: Optional[List[str]] = None, + impl: Optional[str] = None, + abis: Optional[List[str]] = None, +) -> List[Tag]: + """Return a list of supported tags for each version specified in + `versions`. 
+ + :param version: a string version, of the form "33" or "32", + or None. The version will be assumed to support our ABI. + :param platform: specify a list of platforms you want valid + tags for, or None. If None, use the local system platform. + :param impl: specify the exact implementation you want valid + tags for, or None. If None, use the local interpreter impl. + :param abis: specify a list of abis you want valid + tags for, or None. If None, use the local interpreter abi. + """ + supported: List[Tag] = [] + + python_version: Optional[PythonVersion] = None + if version is not None: + python_version = _get_python_version(version) + + interpreter = _get_custom_interpreter(impl, version) + + platforms = _expand_allowed_platforms(platforms) + + is_cpython = (impl or interpreter_name()) == "cp" + if is_cpython: + supported.extend( + cpython_tags( + python_version=python_version, + abis=abis, + platforms=platforms, + ) + ) + else: + supported.extend( + generic_tags( + interpreter=interpreter, + abis=abis, + platforms=platforms, + ) + ) + supported.extend( + compatible_tags( + python_version=python_version, + interpreter=interpreter, + platforms=platforms, + ) + ) + + return supported diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/datetime.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/datetime.py new file mode 100644 index 0000000..8668b3b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/datetime.py @@ -0,0 +1,11 @@ +"""For when pip wants to check the date or time. +""" + +import datetime + + +def today_is_later_than(year: int, month: int, day: int) -> bool: + today = datetime.date.today() + given = datetime.date(year, month, day) + + return today > given diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/deprecation.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/deprecation.py new file mode 100644 index 0000000..72bd6f2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/deprecation.py @@ -0,0 +1,120 @@ +""" +A module that implements tooling to enable easy warnings about deprecations. +""" + +import logging +import warnings +from typing import Any, Optional, TextIO, Type, Union + +from pip._vendor.packaging.version import parse + +from pip import __version__ as current_version # NOTE: tests patch this name. + +DEPRECATION_MSG_PREFIX = "DEPRECATION: " + + +class PipDeprecationWarning(Warning): + pass + + +_original_showwarning: Any = None + + +# Warnings <-> Logging Integration +def _showwarning( + message: Union[Warning, str], + category: Type[Warning], + filename: str, + lineno: int, + file: Optional[TextIO] = None, + line: Optional[str] = None, +) -> None: + if file is not None: + if _original_showwarning is not None: + _original_showwarning(message, category, filename, lineno, file, line) + elif issubclass(category, PipDeprecationWarning): + # We use a specially named logger which will handle all of the + # deprecation messages for pip. 
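get_supported() above ultimately delegates to packaging.tags. A rough idea of what the resulting tag list looks like on the running interpreter, using the standalone packaging distribution (pip vendors the same module as pip._vendor.packaging.tags):

    from packaging import tags

    supported = list(tags.sys_tags())   # most specific tags come first
    print(supported[0])                 # e.g. cp39-cp39-manylinux_2_17_x86_64
    print(tags.Tag("py3", "none", "any") in supported)   # True on CPython 3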
+ logger = logging.getLogger("pip._internal.deprecations") + logger.warning(message) + else: + _original_showwarning(message, category, filename, lineno, file, line) + + +def install_warning_logger() -> None: + # Enable our Deprecation Warnings + warnings.simplefilter("default", PipDeprecationWarning, append=True) + + global _original_showwarning + + if _original_showwarning is None: + _original_showwarning = warnings.showwarning + warnings.showwarning = _showwarning + + +def deprecated( + *, + reason: str, + replacement: Optional[str], + gone_in: Optional[str], + feature_flag: Optional[str] = None, + issue: Optional[int] = None, +) -> None: + """Helper to deprecate existing functionality. + + reason: + Textual reason shown to the user about why this functionality has + been deprecated. Should be a complete sentence. + replacement: + Textual suggestion shown to the user about what alternative + functionality they can use. + gone_in: + The version of pip does this functionality should get removed in. + Raises an error if pip's current version is greater than or equal to + this. + feature_flag: + Command-line flag of the form --use-feature={feature_flag} for testing + upcoming functionality. + issue: + Issue number on the tracker that would serve as a useful place for + users to find related discussion and provide feedback. + """ + + # Determine whether or not the feature is already gone in this version. + is_gone = gone_in is not None and parse(current_version) >= parse(gone_in) + + message_parts = [ + (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"), + ( + gone_in, + "pip {} will enforce this behaviour change." + if not is_gone + else "Since pip {}, this is no longer supported.", + ), + ( + replacement, + "A possible replacement is {}.", + ), + ( + feature_flag, + "You can use the flag --use-feature={} to test the upcoming behaviour." + if not is_gone + else None, + ), + ( + issue, + "Discussion can be found at https://github.com/pypa/pip/issues/{}", + ), + ] + + message = " ".join( + format_str.format(value) + for value, format_str in message_parts + if format_str is not None and value is not None + ) + + # Raise as an error if this behaviour is deprecated. 
+ if is_gone: + raise PipDeprecationWarning(message) + + warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/direct_url_helpers.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/direct_url_helpers.py new file mode 100644 index 0000000..0e8e5e1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/direct_url_helpers.py @@ -0,0 +1,87 @@ +from typing import Optional + +from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo +from pip._internal.models.link import Link +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + + +def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str: + """Convert a DirectUrl to a pip requirement string.""" + direct_url.validate() # if invalid, this is a pip bug + requirement = name + " @ " + fragments = [] + if isinstance(direct_url.info, VcsInfo): + requirement += "{}+{}@{}".format( + direct_url.info.vcs, direct_url.url, direct_url.info.commit_id + ) + elif isinstance(direct_url.info, ArchiveInfo): + requirement += direct_url.url + if direct_url.info.hash: + fragments.append(direct_url.info.hash) + else: + assert isinstance(direct_url.info, DirInfo) + requirement += direct_url.url + if direct_url.subdirectory: + fragments.append("subdirectory=" + direct_url.subdirectory) + if fragments: + requirement += "#" + "&".join(fragments) + return requirement + + +def direct_url_for_editable(source_dir: str) -> DirectUrl: + return DirectUrl( + url=path_to_url(source_dir), + info=DirInfo(editable=True), + ) + + +def direct_url_from_link( + link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False +) -> DirectUrl: + if link.is_vcs: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend + url, requested_revision, _ = vcs_backend.get_url_rev_and_auth( + link.url_without_fragment + ) + # For VCS links, we need to find out and add commit_id. + if link_is_in_wheel_cache: + # If the requested VCS link corresponds to a cached + # wheel, it means the requested revision was an + # immutable commit hash, otherwise it would not have + # been cached. In that case we don't have a source_dir + # with the VCS checkout. + assert requested_revision + commit_id = requested_revision + else: + # If the wheel was not in cache, it means we have + # had to checkout from VCS to build and we have a source_dir + # which we can inspect to find out the commit id. 
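For a VCS link, the helper above produces a PEP 440 direct reference of the form "name @ vcs+url@commit". A sketch with a made-up repository URL and commit id, constructing DirectUrl/VcsInfo the same way direct_url_from_link does:

    from pip._internal.models.direct_url import DirectUrl, VcsInfo
    from pip._internal.utils.direct_url_helpers import (
        direct_url_as_pep440_direct_reference,
    )

    direct_url = DirectUrl(
        url="https://github.com/pypa/pip.git",   # illustrative URL
        info=VcsInfo(
            vcs="git",
            commit_id="0123abcd",                # illustrative hash
            requested_revision="main",           # illustrative revision
        ),
    )
    print(direct_url_as_pep440_direct_reference(direct_url, name="pip"))
    # pip @ git+https://github.com/pypa/pip.git@0123abcd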
+ assert source_dir + commit_id = vcs_backend.get_revision(source_dir) + return DirectUrl( + url=url, + info=VcsInfo( + vcs=vcs_backend.name, + commit_id=commit_id, + requested_revision=requested_revision, + ), + subdirectory=link.subdirectory_fragment, + ) + elif link.is_existing_dir(): + return DirectUrl( + url=link.url_without_fragment, + info=DirInfo(), + subdirectory=link.subdirectory_fragment, + ) + else: + hash = None + hash_name = link.hash_name + if hash_name: + hash = f"{hash_name}={link.hash}" + return DirectUrl( + url=link.url_without_fragment, + info=ArchiveInfo(hash=hash), + subdirectory=link.subdirectory_fragment, + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/distutils_args.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/distutils_args.py new file mode 100644 index 0000000..e4aa5b8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/distutils_args.py @@ -0,0 +1,42 @@ +from distutils.errors import DistutilsArgError +from distutils.fancy_getopt import FancyGetopt +from typing import Dict, List + +_options = [ + ("exec-prefix=", None, ""), + ("home=", None, ""), + ("install-base=", None, ""), + ("install-data=", None, ""), + ("install-headers=", None, ""), + ("install-lib=", None, ""), + ("install-platlib=", None, ""), + ("install-purelib=", None, ""), + ("install-scripts=", None, ""), + ("prefix=", None, ""), + ("root=", None, ""), + ("user", None, ""), +] + + +# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469. +_distutils_getopt = FancyGetopt(_options) # type: ignore + + +def parse_distutils_args(args: List[str]) -> Dict[str, str]: + """Parse provided arguments, returning an object that has the + matched arguments. + + Any unknown arguments are ignored. + """ + result = {} + for arg in args: + try: + _, match = _distutils_getopt.getopt(args=[arg]) + except DistutilsArgError: + # We don't care about any other options, which here may be + # considered unrecognized since our option list is not + # exhaustive. + pass + else: + result.update(match.__dict__) + return result diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/egg_link.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/egg_link.py new file mode 100644 index 0000000..9e0da8d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/egg_link.py @@ -0,0 +1,75 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import os +import re +import sys +from typing import Optional + +from pip._internal.locations import site_packages, user_site +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) + +__all__ = [ + "egg_link_path_from_sys_path", + "egg_link_path_from_location", +] + + +def _egg_link_name(raw_name: str) -> str: + """ + Convert a Name metadata value to a .egg-link name, by applying + the same substitution as pkg_resources's safe_name function. + Note: we cannot use canonicalize_name because it has a different logic. + """ + return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link" + + +def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]: + """ + Look for a .egg-link file for project name, by walking sys.path. 
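The substitution performed by _egg_link_name() above is small enough to show standalone; the project names are examples.

    import re

    def egg_link_name(raw_name: str) -> str:
        # Same substitution as pkg_resources' safe_name, plus the suffix.
        return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link"

    print(egg_link_name("my_project"))       # my-project.egg-link
    print(egg_link_name("zope.interface"))   # zope.interface.egg-link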
+ """ + egg_link_name = _egg_link_name(raw_name) + for path_item in sys.path: + egg_link = os.path.join(path_item, egg_link_name) + if os.path.isfile(egg_link): + return egg_link + return None + + +def egg_link_path_from_location(raw_name: str) -> Optional[str]: + """ + Return the path for the .egg-link file if it exists, otherwise, None. + + There's 3 scenarios: + 1) not in a virtualenv + try to find in site.USER_SITE, then site_packages + 2) in a no-global virtualenv + try to find in site_packages + 3) in a yes-global virtualenv + try to find in site_packages, then site.USER_SITE + (don't look in global location) + + For #1 and #3, there could be odd cases, where there's an egg-link in 2 + locations. + + This method will just return the first one found. + """ + sites = [] + if running_under_virtualenv(): + sites.append(site_packages) + if not virtualenv_no_global() and user_site: + sites.append(user_site) + else: + if user_site: + sites.append(user_site) + sites.append(site_packages) + + egg_link_name = _egg_link_name(raw_name) + for site in sites: + egglink = os.path.join(site, egg_link_name) + if os.path.isfile(egglink): + return egglink + return None diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/encoding.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/encoding.py new file mode 100644 index 0000000..1c73f6c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/encoding.py @@ -0,0 +1,36 @@ +import codecs +import locale +import re +import sys +from typing import List, Tuple + +BOMS: List[Tuple[bytes, str]] = [ + (codecs.BOM_UTF8, "utf-8"), + (codecs.BOM_UTF16, "utf-16"), + (codecs.BOM_UTF16_BE, "utf-16-be"), + (codecs.BOM_UTF16_LE, "utf-16-le"), + (codecs.BOM_UTF32, "utf-32"), + (codecs.BOM_UTF32_BE, "utf-32-be"), + (codecs.BOM_UTF32_LE, "utf-32-le"), +] + +ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)") + + +def auto_decode(data: bytes) -> str: + """Check a bytes string for a BOM to correctly detect the encoding + + Fallback to locale.getpreferredencoding(False) like open() on Python3""" + for bom, encoding in BOMS: + if data.startswith(bom): + return data[len(bom) :].decode(encoding) + # Lets check the first two lines as in PEP263 + for line in data.split(b"\n")[:2]: + if line[0:1] == b"#" and ENCODING_RE.search(line): + result = ENCODING_RE.search(line) + assert result is not None + encoding = result.groups()[0].decode("ascii") + return data.decode(encoding) + return data.decode( + locale.getpreferredencoding(False) or sys.getdefaultencoding(), + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/entrypoints.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/entrypoints.py new file mode 100644 index 0000000..07d941b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/entrypoints.py @@ -0,0 +1,28 @@ +import sys +from typing import List, Optional + +from pip._internal.cli.main import main + + +def _wrapper(args: Optional[List[str]] = None, _nowarn: bool = False) -> int: + """Central wrapper for all old entrypoints. + + Historically pip has had several entrypoints defined. Because of issues + arising from PATH, sys.path, multiple Pythons, their interactions, and most + of them having a pip installed, users suffer every time an entrypoint gets + moved. + + To alleviate this pain, and provide a mechanism for warning users and + directing them to an appropriate place for help, we now define all of + our old entrypoints as wrappers for the current one. 
+ """ + if not _nowarn: + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/filesystem.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/filesystem.py new file mode 100644 index 0000000..b7e6191 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/filesystem.py @@ -0,0 +1,182 @@ +import fnmatch +import os +import os.path +import random +import shutil +import stat +import sys +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, BinaryIO, Iterator, List, Union, cast + +from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed + +from pip._internal.utils.compat import get_path_uid +from pip._internal.utils.misc import format_size + + +def check_path_owner(path: str) -> bool: + # If we don't have a way to check the effective uid of this process, then + # we'll just assume that we own the directory. + if sys.platform == "win32" or not hasattr(os, "geteuid"): + return True + + assert os.path.isabs(path) + + previous = None + while path != previous: + if os.path.lexists(path): + # Check if path is writable by current user. + if os.geteuid() == 0: + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 + else: + return os.access(path, os.W_OK) + else: + previous, path = path, os.path.dirname(path) + return False # assume we don't own the path + + +def copy2_fixed(src: str, dest: str) -> None: + """Wrap shutil.copy2() but map errors copying socket files to + SpecialFileError as expected. + + See also https://bugs.python.org/issue37700. + """ + try: + shutil.copy2(src, dest) + except OSError: + for f in [src, dest]: + try: + is_socket_file = is_socket(f) + except OSError: + # An error has already occurred. Another error here is not + # a problem and we can ignore it. + pass + else: + if is_socket_file: + raise shutil.SpecialFileError(f"`{f}` is a socket") + + raise + + +def is_socket(path: str) -> bool: + return stat.S_ISSOCK(os.lstat(path).st_mode) + + +@contextmanager +def adjacent_tmp_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: + """Return a file-like object pointing to a tmp file next to path. + + The file is created securely and is ensured to be written to disk + after the context reaches its end. + + kwargs will be passed to tempfile.NamedTemporaryFile to control + the way the temporary file will be opened. + """ + with NamedTemporaryFile( + delete=False, + dir=os.path.dirname(path), + prefix=os.path.basename(path), + suffix=".tmp", + **kwargs, + ) as f: + result = cast(BinaryIO, f) + try: + yield result + finally: + result.flush() + os.fsync(result.fileno()) + + +# Tenacity raises RetryError by default, explicitly raise the original exception +_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25)) + +replace = _replace_retry(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. 
+def test_writable_dir(path: str) -> bool: + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. + while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == "posix": + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path: str) -> bool: + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = "accesstest_deleteme_fishfingers_custard_" + alphabet = "abcdefghijklmnopqrstuvwxyz0123456789" + for _ in range(10): + name = basename + "".join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + except FileExistsError: + pass + except PermissionError: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + # This could as well be because the parent dir is not readable, + # due to non-privileged user access. + return False + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise OSError("Unexpected condition testing for writable directory") + + +def find_files(path: str, pattern: str) -> List[str]: + """Returns a list of absolute paths of files beneath path, recursively, + with filenames which match the UNIX-style shell glob pattern.""" + result: List[str] = [] + for root, _, files in os.walk(path): + matches = fnmatch.filter(files, pattern) + result.extend(os.path.join(root, f) for f in matches) + return result + + +def file_size(path: str) -> Union[int, float]: + # If it's a symlink, return 0. + if os.path.islink(path): + return 0 + return os.path.getsize(path) + + +def format_file_size(path: str) -> str: + return format_size(file_size(path)) + + +def directory_size(path: str) -> Union[int, float]: + size = 0.0 + for root, _dirs, files in os.walk(path): + for filename in files: + file_path = os.path.join(root, filename) + size += file_size(file_path) + return size + + +def format_directory_size(path: str) -> str: + return format_size(directory_size(path)) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/filetypes.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/filetypes.py new file mode 100644 index 0000000..5948570 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/filetypes.py @@ -0,0 +1,27 @@ +"""Filetype information. +""" + +from typing import Tuple + +from pip._internal.utils.misc import splitext + +WHEEL_EXTENSION = ".whl" +BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz") +XZ_EXTENSIONS: Tuple[str, ...] = ( + ".tar.xz", + ".txz", + ".tlz", + ".tar.lz", + ".tar.lzma", +) +ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION) +TAR_EXTENSIONS: Tuple[str, ...] 
= (".tar.gz", ".tgz", ".tar") +ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS + + +def is_archive_file(name: str) -> bool: + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/glibc.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/glibc.py new file mode 100644 index 0000000..7bd3c20 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/glibc.py @@ -0,0 +1,88 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import os +import sys +from typing import Optional, Tuple + + +def glibc_version_string() -> Optional[str]: + "Returns glibc version string, or None if not using glibc." + return glibc_version_string_confstr() or glibc_version_string_ctypes() + + +def glibc_version_string_confstr() -> Optional[str]: + "Primary implementation of glibc_version_string using os.confstr." + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module: + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": + _, version = os.confstr("CS_GNU_LIBC_VERSION").split() + except (AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def glibc_version_string_ctypes() -> Optional[str]: + "Fallback implementation of glibc_version_string using ctypes." + + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# platform.libc_ver regularly returns completely nonsensical glibc +# versions. E.g. on my computer, platform says: +# +# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.7') +# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.9') +# +# But the truth is: +# +# ~$ ldd --version +# ldd (Debian GLIBC 2.22-11) 2.22 +# +# This is unfortunate, because it means that the linehaul data on libc +# versions that was generated by pip 8.1.2 and earlier is useless and +# misleading. Solution: instead of using platform, use our code that actually +# works. +def libc_ver() -> Tuple[str, str]: + """Try to determine the glibc version + + Returns a tuple of strings (lib, version) which default to empty strings + in case the lookup fails. 
+ """ + glibc_version = glibc_version_string() + if glibc_version is None: + return ("", "") + else: + return ("glibc", glibc_version) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/hashes.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/hashes.py new file mode 100644 index 0000000..82eb035 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/hashes.py @@ -0,0 +1,144 @@ +import hashlib +from typing import TYPE_CHECKING, BinaryIO, Dict, Iterator, List + +from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError +from pip._internal.utils.misc import read_chunks + +if TYPE_CHECKING: + from hashlib import _Hash + + # NoReturn introduced in 3.6.2; imported only for type checking to maintain + # pip compatibility with older patch versions of Python 3.6 + from typing import NoReturn + + +# The recommended hash algo of the moment. Change this whenever the state of +# the art changes; it won't hurt backward compatibility. +FAVORITE_HASH = "sha256" + + +# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` +# Currently, those are the ones at least as collision-resistant as sha256. +STRONG_HASHES = ["sha256", "sha384", "sha512"] + + +class Hashes: + """A wrapper that builds multiple hashes at once and checks them against + known-good values + + """ + + def __init__(self, hashes: Dict[str, List[str]] = None) -> None: + """ + :param hashes: A dict of algorithm names pointing to lists of allowed + hex digests + """ + allowed = {} + if hashes is not None: + for alg, keys in hashes.items(): + # Make sure values are always sorted (to ease equality checks) + allowed[alg] = sorted(keys) + self._allowed = allowed + + def __and__(self, other: "Hashes") -> "Hashes": + if not isinstance(other, Hashes): + return NotImplemented + + # If either of the Hashes object is entirely empty (i.e. no hash + # specified at all), all hashes from the other object are allowed. + if not other: + return self + if not self: + return other + + # Otherwise only hashes that present in both objects are allowed. + new = {} + for alg, values in other._allowed.items(): + if alg not in self._allowed: + continue + new[alg] = [v for v in values if v in self._allowed[alg]] + return Hashes(new) + + @property + def digest_count(self) -> int: + return sum(len(digests) for digests in self._allowed.values()) + + def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool: + """Return whether the given hex digest is allowed.""" + return hex_digest in self._allowed.get(hash_name, []) + + def check_against_chunks(self, chunks: Iterator[bytes]) -> None: + """Check good hashes against ones built from iterable of chunks of + data. + + Raise HashMismatch if none match. + + """ + gots = {} + for hash_name in self._allowed.keys(): + try: + gots[hash_name] = hashlib.new(hash_name) + except (ValueError, TypeError): + raise InstallationError(f"Unknown hash name: {hash_name}") + + for chunk in chunks: + for hash in gots.values(): + hash.update(chunk) + + for hash_name, got in gots.items(): + if got.hexdigest() in self._allowed[hash_name]: + return + self._raise(gots) + + def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn": + raise HashMismatch(self._allowed, gots) + + def check_against_file(self, file: BinaryIO) -> None: + """Check good hashes against a file-like object + + Raise HashMismatch if none match. 
+ + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path: str) -> None: + with open(path, "rb") as file: + return self.check_against_file(file) + + def __bool__(self) -> bool: + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Hashes): + return NotImplemented + return self._allowed == other._allowed + + def __hash__(self) -> int: + return hash( + ",".join( + sorted( + ":".join((alg, digest)) + for alg, digest_list in self._allowed.items() + for digest in digest_list + ) + ) + ) + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + + def __init__(self) -> None: + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. + super().__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn": + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/inject_securetransport.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/inject_securetransport.py new file mode 100644 index 0000000..276aa79 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/inject_securetransport.py @@ -0,0 +1,35 @@ +"""A helper module that injects SecureTransport, on import. + +The import should be done as early as possible, to ensure all requests and +sessions (or whatever) are created after injecting SecureTransport. + +Note that we only do the injection on macOS, when the linked OpenSSL is too +old to handle TLSv1.2. +""" + +import sys + + +def inject_securetransport() -> None: + # Only relevant on macOS + if sys.platform != "darwin": + return + + try: + import ssl + except ImportError: + return + + # Checks for OpenSSL 1.0.1 + if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F: + return + + try: + from pip._vendor.urllib3.contrib import securetransport + except (ImportError, OSError): + return + + securetransport.inject_into_urllib3() + + +inject_securetransport() diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/logging.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/logging.py new file mode 100644 index 0000000..a4b828a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/logging.py @@ -0,0 +1,358 @@ +import contextlib +import errno +import logging +import logging.handlers +import os +import sys +from logging import Filter +from typing import IO, Any, Callable, Iterator, Optional, TextIO, Type, cast + +from pip._internal.utils._log import VERBOSE, getLogger +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX +from pip._internal.utils.misc import ensure_dir + +try: + import threading +except ImportError: + import dummy_threading as threading # type: ignore + + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +_log_state = threading.local() +subprocess_logger = getLogger("pip.subprocessor") + + +class BrokenStdoutLoggingError(Exception): + """ + Raised if BrokenPipeError occurs for the stdout stream while logging. 
+ """ + + +def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool: + if exc_class is BrokenPipeError: + return True + + # On Windows, a broken pipe can show up as EINVAL rather than EPIPE: + # https://bugs.python.org/issue19612 + # https://bugs.python.org/issue30418 + if not WINDOWS: + return False + + return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE) + + +@contextlib.contextmanager +def indent_log(num: int = 2) -> Iterator[None]: + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + # For thread-safety + _log_state.indentation = get_indentation() + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation() -> int: + return getattr(_log_state, "indentation", 0) + + +class IndentingFormatter(logging.Formatter): + default_time_format = "%Y-%m-%dT%H:%M:%S" + + def __init__( + self, + *args: Any, + add_timestamp: bool = False, + **kwargs: Any, + ) -> None: + """ + A logging.Formatter that obeys the indent_log() context manager. + + :param add_timestamp: A bool indicating output lines should be prefixed + with their record's timestamp. + """ + self.add_timestamp = add_timestamp + super().__init__(*args, **kwargs) + + def get_message_start(self, formatted: str, levelno: int) -> str: + """ + Return the start of the formatted log message (not counting the + prefix to add to each line). + """ + if levelno < logging.WARNING: + return "" + if formatted.startswith(DEPRECATION_MSG_PREFIX): + # Then the message already has a prefix. We don't want it to + # look like "WARNING: DEPRECATION: ...." + return "" + if levelno < logging.ERROR: + return "WARNING: " + + return "ERROR: " + + def format(self, record: logging.LogRecord) -> str: + """ + Calls the standard formatter, but will indent all of the log message + lines by our current indentation level. + """ + formatted = super().format(record) + message_start = self.get_message_start(formatted, record.levelno) + formatted = message_start + formatted + + prefix = "" + if self.add_timestamp: + prefix = f"{self.formatTime(record)} " + prefix += " " * get_indentation() + formatted = "".join([prefix + line for line in formatted.splitlines(True)]) + return formatted + + +def _color_wrap(*colors: str) -> Callable[[str], str]: + def wrapped(inp: str) -> str: + return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) + + return wrapped + + +class ColorizedStreamHandler(logging.StreamHandler): + + # Don't build up a list of colors if we don't have colorama + if colorama: + COLORS = [ + # This needs to be in order from highest logging level to lowest. + (logging.ERROR, _color_wrap(colorama.Fore.RED)), + (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)), + ] + else: + COLORS = [] + + def __init__(self, stream: Optional[TextIO] = None, no_color: bool = None) -> None: + super().__init__(stream) + self._no_color = no_color + + if WINDOWS and colorama: + self.stream = colorama.AnsiToWin32(self.stream) + + def _using_stdout(self) -> bool: + """ + Return whether the handler is using sys.stdout. + """ + if WINDOWS and colorama: + # Then self.stream is an AnsiToWin32 object. 
+ stream = cast(colorama.AnsiToWin32, self.stream) + return stream.wrapped is sys.stdout + + return self.stream is sys.stdout + + def should_color(self) -> bool: + # Don't colorize things if we do not have colorama or if told not to + if not colorama or self._no_color: + return False + + real_stream = ( + self.stream + if not isinstance(self.stream, colorama.AnsiToWin32) + else self.stream.wrapped + ) + + # If the stream is a tty we should color it + if hasattr(real_stream, "isatty") and real_stream.isatty(): + return True + + # If we have an ANSI term we should color it + if os.environ.get("TERM") == "ANSI": + return True + + # If anything else we should not color it + return False + + def format(self, record: logging.LogRecord) -> str: + msg = super().format(record) + + if self.should_color(): + for level, color in self.COLORS: + if record.levelno >= level: + msg = color(msg) + break + + return msg + + # The logging module says handleError() can be customized. + def handleError(self, record: logging.LogRecord) -> None: + exc_class, exc = sys.exc_info()[:2] + # If a broken pipe occurred while calling write() or flush() on the + # stdout stream in logging's Handler.emit(), then raise our special + # exception so we can handle it in main() instead of logging the + # broken pipe error and continuing. + if ( + exc_class + and exc + and self._using_stdout() + and _is_broken_pipe_error(exc_class, exc) + ): + raise BrokenStdoutLoggingError() + + return super().handleError(record) + + +class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): + def _open(self) -> IO[Any]: + ensure_dir(os.path.dirname(self.baseFilename)) + return super()._open() + + +class MaxLevelFilter(Filter): + def __init__(self, level: int) -> None: + self.level = level + + def filter(self, record: logging.LogRecord) -> bool: + return record.levelno < self.level + + +class ExcludeLoggerFilter(Filter): + + """ + A logging Filter that excludes records from a logger (or its children). + """ + + def filter(self, record: logging.LogRecord) -> bool: + # The base Filter class allows only records from a logger (or its + # children). + return not super().filter(record) + + +def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int: + """Configures and sets up all of the logging + + Returns the requested logging level, as its integer value. + """ + + # Determine the level to be logging at. + if verbosity >= 2: + level_number = logging.DEBUG + elif verbosity == 1: + level_number = VERBOSE + elif verbosity == -1: + level_number = logging.WARNING + elif verbosity == -2: + level_number = logging.ERROR + elif verbosity <= -3: + level_number = logging.CRITICAL + else: + level_number = logging.INFO + + level = logging.getLevelName(level_number) + + # The "root" logger should match the "console" level *unless* we also need + # to log to a user log file. + include_user_log = user_log_file is not None + if include_user_log: + additional_log_file = user_log_file + root_level = "DEBUG" + else: + additional_log_file = "/dev/null" + root_level = level + + # Disable any logging besides WARNING unless we have DEBUG level logging + # enabled for vendored libraries. 
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" + + # Shorthands for clarity + log_streams = { + "stdout": "ext://sys.stdout", + "stderr": "ext://sys.stderr", + } + handler_classes = { + "stream": "pip._internal.utils.logging.ColorizedStreamHandler", + "file": "pip._internal.utils.logging.BetterRotatingFileHandler", + } + handlers = ["console", "console_errors", "console_subprocess"] + ( + ["user_log"] if include_user_log else [] + ) + + logging.config.dictConfig( + { + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip._internal.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + "restrict_to_subprocess": { + "()": "logging.Filter", + "name": subprocess_logger.name, + }, + "exclude_subprocess": { + "()": "pip._internal.utils.logging.ExcludeLoggerFilter", + "name": subprocess_logger.name, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + "indent_with_timestamp": { + "()": IndentingFormatter, + "format": "%(message)s", + "add_timestamp": True, + }, + }, + "handlers": { + "console": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stdout"], + "filters": ["exclude_subprocess", "exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["exclude_subprocess"], + "formatter": "indent", + }, + # A handler responsible for logging to the console messages + # from the "subprocessor" logger. + "console_subprocess": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["restrict_to_subprocess"], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": handler_classes["file"], + "filename": additional_log_file, + "encoding": "utf-8", + "delay": True, + "formatter": "indent_with_timestamp", + }, + }, + "root": { + "level": root_level, + "handlers": handlers, + }, + "loggers": {"pip._vendor": {"level": vendored_log_level}}, + } + ) + + return level_number diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/misc.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/misc.py new file mode 100644 index 0000000..d3e9053 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/misc.py @@ -0,0 +1,689 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +import contextlib +import errno +import getpass +import hashlib +import io +import logging +import os +import posixpath +import shutil +import stat +import sys +import urllib.parse +from io import StringIO +from itertools import filterfalse, tee, zip_longest +from types import TracebackType +from typing import ( + Any, + BinaryIO, + Callable, + ContextManager, + Iterable, + Iterator, + List, + Optional, + TextIO, + Tuple, + Type, + TypeVar, + cast, +) + +from pip._vendor.pkg_resources import Distribution +from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed + +from pip import __version__ +from pip._internal.exceptions import CommandError +from pip._internal.locations import get_major_minor_version, site_packages, user_site +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.egg_link import egg_link_path_from_location +from pip._internal.utils.virtualenv import running_under_virtualenv + +__all__ = [ + "rmtree", + "display_path", + "backup_dir", + "ask", + "splitext", + "format_size", + "is_installable_dir", + "normalize_path", + "renames", + "get_prog", + "captured_stdout", + "ensure_dir", + "remove_auth_from_url", +] + + +logger = logging.getLogger(__name__) + +T = TypeVar("T") +ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] +VersionInfo = Tuple[int, int, int] +NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]] + + +def get_pip_version() -> str: + pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") + pip_pkg_dir = os.path.abspath(pip_pkg_dir) + + return "pip {} from {} (python {})".format( + __version__, + pip_pkg_dir, + get_major_minor_version(), + ) + + +def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]: + """ + Convert a tuple of ints representing a Python version to one of length + three. + + :param py_version_info: a tuple of ints representing a Python version, + or None to specify no version. The tuple can have any length. + + :return: a tuple of length three if `py_version_info` is non-None. + Otherwise, return `py_version_info` unchanged (i.e. None). + """ + if len(py_version_info) < 3: + py_version_info += (3 - len(py_version_info)) * (0,) + elif len(py_version_info) > 3: + py_version_info = py_version_info[:3] + + return cast("VersionInfo", py_version_info) + + +def ensure_dir(path: str) -> None: + """os.path.makedirs without EEXIST.""" + try: + os.makedirs(path) + except OSError as e: + # Windows can raise spurious ENOTEMPTY errors. See #6426. + if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: + raise + + +def get_prog() -> str: + try: + prog = os.path.basename(sys.argv[0]) + if prog in ("__main__.py", "-c"): + return f"{sys.executable} -m pip" + else: + return prog + except (AttributeError, TypeError, IndexError): + pass + return "pip" + + +# Retry every half second for up to 3 seconds +# Tenacity raises RetryError by default, explicitly raise the original exception +@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) +def rmtree(dir: str, ignore_errors: bool = False) -> None: + shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) + + +def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: ExcInfo) -> None: + """On Windows, the files in .svn are read-only, so when rmtree() tries to + remove them, an exception is thrown. 
We catch that here, remove the + read-only attribute, and hopefully continue without problems.""" + try: + has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + except OSError: + # it's equivalent to os.path.exists + return + + if has_attr_readonly: + # convert to read/write + os.chmod(path, stat.S_IWRITE) + # use the original function to repeat the operation + func(path) + return + else: + raise + + +def display_path(path: str) -> str: + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if path.startswith(os.getcwd() + os.path.sep): + path = "." + path[len(os.getcwd()) :] + return path + + +def backup_dir(dir: str, ext: str = ".bak") -> str: + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def ask_path_exists(message: str, options: Iterable[str]) -> str: + for action in os.environ.get("PIP_EXISTS_ACTION", "").split(): + if action in options: + return action + return ask(message, options) + + +def _check_no_input(message: str) -> None: + """Raise an error if no input is allowed.""" + if os.environ.get("PIP_NO_INPUT"): + raise Exception( + f"No input was expected ($PIP_NO_INPUT set); question: {message}" + ) + + +def ask(message: str, options: Iterable[str]) -> str: + """Ask the message interactively, with the given possible responses""" + while 1: + _check_no_input(message) + response = input(message) + response = response.strip().lower() + if response not in options: + print( + "Your response ({!r}) was not one of the expected responses: " + "{}".format(response, ", ".join(options)) + ) + else: + return response + + +def ask_input(message: str) -> str: + """Ask for input interactively.""" + _check_no_input(message) + return input(message) + + +def ask_password(message: str) -> str: + """Ask for a password interactively.""" + _check_no_input(message) + return getpass.getpass(message) + + +def strtobool(val: str) -> int: + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return 1 + elif val in ("n", "no", "f", "false", "off", "0"): + return 0 + else: + raise ValueError(f"invalid truth value {val!r}") + + +def format_size(bytes: float) -> str: + if bytes > 1000 * 1000: + return "{:.1f} MB".format(bytes / 1000.0 / 1000) + elif bytes > 10 * 1000: + return "{} kB".format(int(bytes / 1000)) + elif bytes > 1000: + return "{:.1f} kB".format(bytes / 1000.0) + else: + return "{} bytes".format(int(bytes)) + + +def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]: + """Return a list of formatted rows and a list of column sizes. + + For example:: + + >>> tabulate([['foobar', 2000], [0xdeadbeef]]) + (['foobar 2000', '3735928559'], [10, 4]) + """ + rows = [tuple(map(str, row)) for row in rows] + sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")] + table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows] + return table, sizes + + +def is_installable_dir(path: str) -> bool: + """Is path is a directory containing pyproject.toml or setup.py? + + If pyproject.toml exists, this is a PEP 517 project. 
Otherwise we look for + a legacy setuptools layout by identifying setup.py. We don't check for the + setup.cfg because using it without setup.py is only available for PEP 517 + projects, which are already covered by the pyproject.toml check. + """ + if not os.path.isdir(path): + return False + if os.path.isfile(os.path.join(path, "pyproject.toml")): + return True + if os.path.isfile(os.path.join(path, "setup.py")): + return True + return False + + +def read_chunks(file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE) -> Iterator[bytes]: + """Yield pieces of data from a file-like object until EOF.""" + while True: + chunk = file.read(size) + if not chunk: + break + yield chunk + + +def normalize_path(path: str, resolve_symlinks: bool = True) -> str: + """ + Convert a path to its canonical, case-normalized, absolute version. + + """ + path = os.path.expanduser(path) + if resolve_symlinks: + path = os.path.realpath(path) + else: + path = os.path.abspath(path) + return os.path.normcase(path) + + +def splitext(path: str) -> Tuple[str, str]: + """Like os.path.splitext, but take off .tar too""" + base, ext = posixpath.splitext(path) + if base.lower().endswith(".tar"): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + +def renames(old: str, new: str) -> None: + """Like os.renames(), but handles renaming across devices.""" + # Implementation borrowed from os.renames(). + head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path: str) -> bool: + """ + Return True if path is within sys.prefix, if we're running in a virtualenv. + + If we're not in a virtualenv, all paths are considered "local." + + Caution: this function assumes the head of path has been normalized + with normalize_path. + """ + if not running_under_virtualenv(): + return True + return path.startswith(normalize_path(sys.prefix)) + + +def dist_is_local(dist: Distribution) -> bool: + """ + Return True if given Distribution object is installed locally + (i.e. within current virtualenv). + + Always True if we're not in a virtualenv. + + """ + return is_local(dist_location(dist)) + + +def dist_in_usersite(dist: Distribution) -> bool: + """ + Return True if given Distribution is installed in user site. + """ + return dist_location(dist).startswith(normalize_path(user_site)) + + +def dist_in_site_packages(dist: Distribution) -> bool: + """ + Return True if given Distribution is installed in + sysconfig.get_python_lib(). + """ + return dist_location(dist).startswith(normalize_path(site_packages)) + + +def get_distribution(req_name: str) -> Optional[Distribution]: + """Given a requirement name, return the installed Distribution object. + + This searches from *all* distributions available in the environment, to + match the behavior of ``pkg_resources.get_distribution()``. + + Left for compatibility until direct pkg_resources uses are refactored out. + """ + from pip._internal.metadata import get_default_environment + from pip._internal.metadata.pkg_resources import Distribution as _Dist + + dist = get_default_environment().get_distribution(req_name) + if dist is None: + return None + return cast(_Dist, dist)._dist + + +def dist_location(dist: Distribution) -> str: + """ + Get the site-packages location of this distribution. 
Generally + this is dist.location, except in the case of develop-installed + packages, where dist.location is the source code location, and we + want to know where the egg-link file is. + + The returned location is normalized (in particular, with symlinks removed). + """ + egg_link = egg_link_path_from_location(dist.project_name) + if egg_link: + return normalize_path(egg_link) + return normalize_path(dist.location) + + +def write_output(msg: Any, *args: Any) -> None: + logger.info(msg, *args) + + +class StreamWrapper(StringIO): + orig_stream: TextIO = None + + @classmethod + def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper": + cls.orig_stream = orig_stream + return cls() + + # compileall.compile_dir() needs stdout.encoding to print to stdout + # https://github.com/python/mypy/issues/4125 + @property + def encoding(self): # type: ignore + return self.orig_stream.encoding + + +@contextlib.contextmanager +def captured_output(stream_name: str) -> Iterator[StreamWrapper]: + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO. + + Taken from Lib/support/__init__.py in the CPython repo. + """ + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + + +def captured_stdout() -> ContextManager[StreamWrapper]: + """Capture the output of sys.stdout: + + with captured_stdout() as stdout: + print('hello') + self.assertEqual(stdout.getvalue(), 'hello\n') + + Taken from Lib/support/__init__.py in the CPython repo. + """ + return captured_output("stdout") + + +def captured_stderr() -> ContextManager[StreamWrapper]: + """ + See captured_stdout(). + """ + return captured_output("stderr") + + +# Simulates an enum +def enum(*sequential: Any, **named: Any) -> Type[Any]: + enums = dict(zip(sequential, range(len(sequential))), **named) + reverse = {value: key for key, value in enums.items()} + enums["reverse_mapping"] = reverse + return type("Enum", (), enums) + + +def build_netloc(host: str, port: Optional[int]) -> str: + """ + Build a netloc from a host-port pair + """ + if port is None: + return host + if ":" in host: + # Only wrap host with square brackets when it is IPv6 + host = f"[{host}]" + return f"{host}:{port}" + + +def build_url_from_netloc(netloc: str, scheme: str = "https") -> str: + """ + Build a full URL from a netloc. + """ + if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc: + # It must be a bare IPv6 address, so wrap it with brackets. + netloc = f"[{netloc}]" + return f"{scheme}://{netloc}" + + +def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]: + """ + Return the host-port pair from a netloc. + """ + url = build_url_from_netloc(netloc) + parsed = urllib.parse.urlparse(url) + return parsed.hostname, parsed.port + + +def split_auth_from_netloc(netloc: str) -> NetlocTuple: + """ + Parse out and remove the auth information from a netloc. + + Returns: (netloc, (username, password)). + """ + if "@" not in netloc: + return netloc, (None, None) + + # Split from the right because that's how urllib.parse.urlsplit() + # behaves if more than one @ is present (which can be checked using + # the password attribute of urlsplit()'s return value). 
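+ # Illustrative example (values made up): "user:p@ss@example.com"
+ # rsplits into auth "user:p@ss" and netloc "example.com".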
+ auth, netloc = netloc.rsplit("@", 1) + pw: Optional[str] = None + if ":" in auth: + # Split from the left because that's how urllib.parse.urlsplit() + # behaves if more than one : is present (which again can be checked + # using the password attribute of the return value) + user, pw = auth.split(":", 1) + else: + user, pw = auth, None + + user = urllib.parse.unquote(user) + if pw is not None: + pw = urllib.parse.unquote(pw) + + return netloc, (user, pw) + + +def redact_netloc(netloc: str) -> str: + """ + Replace the sensitive data in a netloc with "****", if it exists. + + For example: + - "user:pass@example.com" returns "user:****@example.com" + - "accesstoken@example.com" returns "****@example.com" + """ + netloc, (user, password) = split_auth_from_netloc(netloc) + if user is None: + return netloc + if password is None: + user = "****" + password = "" + else: + user = urllib.parse.quote(user) + password = ":****" + return "{user}{password}@{netloc}".format( + user=user, password=password, netloc=netloc + ) + + +def _transform_url( + url: str, transform_netloc: Callable[[str], Tuple[Any, ...]] +) -> Tuple[str, NetlocTuple]: + """Transform and replace netloc in a url. + + transform_netloc is a function taking the netloc and returning a + tuple. The first element of this tuple is the new netloc. The + entire tuple is returned. + + Returns a tuple containing the transformed url as item 0 and the + original tuple returned by transform_netloc as item 1. + """ + purl = urllib.parse.urlsplit(url) + netloc_tuple = transform_netloc(purl.netloc) + # stripped url + url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment) + surl = urllib.parse.urlunsplit(url_pieces) + return surl, cast("NetlocTuple", netloc_tuple) + + +def _get_netloc(netloc: str) -> NetlocTuple: + return split_auth_from_netloc(netloc) + + +def _redact_netloc(netloc: str) -> Tuple[str]: + return (redact_netloc(netloc),) + + +def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]: + """ + Parse a url into separate netloc, auth, and url with no auth. + + Returns: (url_without_auth, netloc, (username, password)) + """ + url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) + return url_without_auth, netloc, auth + + +def remove_auth_from_url(url: str) -> str: + """Return a copy of url with 'username:password@' removed.""" + # username/pass params are passed to subversion through flags + # and are not recognized in the url. + return _transform_url(url, _get_netloc)[0] + + +def redact_auth_from_url(url: str) -> str: + """Replace the password in a given url with ****.""" + return _transform_url(url, _redact_netloc)[0] + + +class HiddenText: + def __init__(self, secret: str, redacted: str) -> None: + self.secret = secret + self.redacted = redacted + + def __repr__(self) -> str: + return "".format(str(self)) + + def __str__(self) -> str: + return self.redacted + + # This is useful for testing. + def __eq__(self, other: Any) -> bool: + if type(self) != type(other): + return False + + # The string being used for redaction doesn't also have to match, + # just the raw, original string. 
+ return self.secret == other.secret + + +def hide_value(value: str) -> HiddenText: + return HiddenText(value, redacted="****") + + +def hide_url(url: str) -> HiddenText: + redacted = redact_auth_from_url(url) + return HiddenText(url, redacted=redacted) + + +def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None: + """Protection of pip.exe from modification on Windows + + On Windows, any operation modifying pip should be run as: + python -m pip ... + """ + pip_names = [ + "pip.exe", + "pip{}.exe".format(sys.version_info[0]), + "pip{}.{}.exe".format(*sys.version_info[:2]), + ] + + # See https://github.com/pypa/pip/issues/1299 for more discussion + should_show_use_python_msg = ( + modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names + ) + + if should_show_use_python_msg: + new_command = [sys.executable, "-m", "pip"] + sys.argv[1:] + raise CommandError( + "To modify pip, please run the following command:\n{}".format( + " ".join(new_command) + ) + ) + + +def is_console_interactive() -> bool: + """Is this console interactive?""" + return sys.stdin is not None and sys.stdin.isatty() + + +def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]: + """Return (hash, length) for path using hashlib.sha256()""" + + h = hashlib.sha256() + length = 0 + with open(path, "rb") as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + return h, length + + +def is_wheel_installed() -> bool: + """ + Return whether the wheel package is installed. + """ + try: + import wheel # noqa: F401 + except ImportError: + return False + + return True + + +def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]: + """ + Return paired elements. + + For example: + s -> (s0, s1), (s2, s3), (s4, s5), ... 
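+
+ Note that zip_longest() is used, so an odd-length iterable yields a
+ final pair padded with None, e.g. [1, 2, 3] -> (1, 2), (3, None).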
+ """ + iterable = iter(iterable) + return zip_longest(iterable, iterable) + + +def partition( + pred: Callable[[T], bool], + iterable: Iterable[T], +) -> Tuple[Iterable[T], Iterable[T]]: + """ + Use a predicate to partition entries into false entries and true entries, + like + + partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 + """ + t1, t2 = tee(iterable) + return filterfalse(pred, t1), filter(pred, t2) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/models.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/models.py new file mode 100644 index 0000000..b6bb21a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/models.py @@ -0,0 +1,39 @@ +"""Utilities for defining models +""" + +import operator +from typing import Any, Callable, Type + + +class KeyBasedCompareMixin: + """Provides comparison capabilities that is based on a key""" + + __slots__ = ["_compare_key", "_defining_class"] + + def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None: + self._compare_key = key + self._defining_class = defining_class + + def __hash__(self) -> int: + return hash(self._compare_key) + + def __lt__(self, other: Any) -> bool: + return self._compare(other, operator.__lt__) + + def __le__(self, other: Any) -> bool: + return self._compare(other, operator.__le__) + + def __gt__(self, other: Any) -> bool: + return self._compare(other, operator.__gt__) + + def __ge__(self, other: Any) -> bool: + return self._compare(other, operator.__ge__) + + def __eq__(self, other: Any) -> bool: + return self._compare(other, operator.__eq__) + + def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool: + if not isinstance(other, self._defining_class): + return NotImplemented + + return method(self._compare_key, other._compare_key) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/packaging.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/packaging.py new file mode 100644 index 0000000..f100473 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/packaging.py @@ -0,0 +1,84 @@ +import functools +import logging +from email.message import Message +from email.parser import FeedParser +from typing import Optional, Tuple + +from pip._vendor import pkg_resources +from pip._vendor.packaging import specifiers, version +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.pkg_resources import Distribution + +from pip._internal.exceptions import NoneMetadataError +from pip._internal.utils.misc import display_path + +logger = logging.getLogger(__name__) + + +def check_requires_python( + requires_python: Optional[str], version_info: Tuple[int, ...] +) -> bool: + """ + Check if the given Python version matches a "Requires-Python" specifier. + + :param version_info: A 3-tuple of ints representing a Python + major-minor-micro version to check (e.g. `sys.version_info[:3]`). + + :return: `True` if the given Python version satisfies the requirement. + Otherwise, return `False`. + + :raises InvalidSpecifier: If `requires_python` has an invalid format. 
+ """ + if requires_python is None: + # The package provides no information + return True + requires_python_specifier = specifiers.SpecifierSet(requires_python) + + python_version = version.parse(".".join(map(str, version_info))) + return python_version in requires_python_specifier + + +def get_metadata(dist: Distribution) -> Message: + """ + :raises NoneMetadataError: if the distribution reports `has_metadata()` + True but `get_metadata()` returns None. + """ + metadata_name = "METADATA" + if isinstance(dist, pkg_resources.DistInfoDistribution) and dist.has_metadata( + metadata_name + ): + metadata = dist.get_metadata(metadata_name) + elif dist.has_metadata("PKG-INFO"): + metadata_name = "PKG-INFO" + metadata = dist.get_metadata(metadata_name) + else: + logger.warning("No metadata found in %s", display_path(dist.location)) + metadata = "" + + if metadata is None: + raise NoneMetadataError(dist, metadata_name) + + feed_parser = FeedParser() + # The following line errors out if with a "NoneType" TypeError if + # passed metadata=None. + feed_parser.feed(metadata) + return feed_parser.close() + + +def get_installer(dist: Distribution) -> str: + if dist.has_metadata("INSTALLER"): + for line in dist.get_metadata_lines("INSTALLER"): + if line.strip(): + return line.strip() + return "" + + +@functools.lru_cache(maxsize=512) +def get_requirement(req_string: str) -> Requirement: + """Construct a packaging.Requirement object with caching""" + # Parsing requirement strings is expensive, and is also expected to happen + # with a low diversity of different arguments (at least relative the number + # constructed). This method adds a cache to requirement object creation to + # minimize repeated parsing of the same string to construct equivalent + # Requirement objects. + return Requirement(req_string) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/parallel.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/parallel.py new file mode 100644 index 0000000..e318577 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/parallel.py @@ -0,0 +1,103 @@ +"""Convenient parallelization of higher order functions. + +This module provides two helper functions, with appropriate fallbacks on +Python 2 and on systems lacking support for synchronization mechanisms: + +- map_multiprocess +- map_multithread + +These helpers work like Python 3's map, with two differences: + +- They don't guarantee the order of processing of + the elements of the iterable. +- The underlying process/thread pools chop the iterable into + a number of chunks, so that for very long iterables using + a large value for chunksize can make the job complete much faster + than using the default value of 1. +""" + +__all__ = ["map_multiprocess", "map_multithread"] + +from contextlib import contextmanager +from multiprocessing import Pool as ProcessPool +from multiprocessing import pool +from multiprocessing.dummy import Pool as ThreadPool +from typing import Callable, Iterable, Iterator, TypeVar, Union + +from pip._vendor.requests.adapters import DEFAULT_POOLSIZE + +Pool = Union[pool.Pool, pool.ThreadPool] +S = TypeVar("S") +T = TypeVar("T") + +# On platforms without sem_open, multiprocessing[.dummy] Pool +# cannot be created. +try: + import multiprocessing.synchronize # noqa +except ImportError: + LACK_SEM_OPEN = True +else: + LACK_SEM_OPEN = False + +# Incredibly large timeout to work around bpo-8296 on Python 2. 
+TIMEOUT = 2000000 + + +@contextmanager +def closing(pool: Pool) -> Iterator[Pool]: + """Return a context manager making sure the pool closes properly.""" + try: + yield pool + finally: + # For Pool.imap*, close and join are needed + # for the returned iterator to begin yielding. + pool.close() + pool.join() + pool.terminate() + + +def _map_fallback( + func: Callable[[S], T], iterable: Iterable[S], chunksize: int = 1 +) -> Iterator[T]: + """Make an iterator applying func to each element in iterable. + + This function is the sequential fallback either on Python 2 + where Pool.imap* doesn't react to KeyboardInterrupt + or when sem_open is unavailable. + """ + return map(func, iterable) + + +def _map_multiprocess( + func: Callable[[S], T], iterable: Iterable[S], chunksize: int = 1 +) -> Iterator[T]: + """Chop iterable into chunks and submit them to a process pool. + + For very long iterables using a large value for chunksize can make + the job complete much faster than using the default value of 1. + + Return an unordered iterator of the results. + """ + with closing(ProcessPool()) as pool: + return pool.imap_unordered(func, iterable, chunksize) + + +def _map_multithread( + func: Callable[[S], T], iterable: Iterable[S], chunksize: int = 1 +) -> Iterator[T]: + """Chop iterable into chunks and submit them to a thread pool. + + For very long iterables using a large value for chunksize can make + the job complete much faster than using the default value of 1. + + Return an unordered iterator of the results. + """ + with closing(ThreadPool(DEFAULT_POOLSIZE)) as pool: + return pool.imap_unordered(func, iterable, chunksize) + + +if LACK_SEM_OPEN: + map_multiprocess = map_multithread = _map_fallback +else: + map_multiprocess = _map_multiprocess + map_multithread = _map_multithread diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/pkg_resources.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/pkg_resources.py new file mode 100644 index 0000000..bd846aa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/pkg_resources.py @@ -0,0 +1,33 @@ +from typing import Dict, Iterable, List + +from pip._vendor.pkg_resources import yield_lines + + +class DictMetadata: + """IMetadataProvider that reads metadata files from a dictionary.""" + + def __init__(self, metadata: Dict[str, bytes]) -> None: + self._metadata = metadata + + def has_metadata(self, name: str) -> bool: + return name in self._metadata + + def get_metadata(self, name: str) -> str: + try: + return self._metadata[name].decode() + except UnicodeDecodeError as e: + # Mirrors handling done in pkg_resources.NullProvider. 
+ e.reason += f" in {name} file" + raise + + def get_metadata_lines(self, name: str) -> Iterable[str]: + return yield_lines(self.get_metadata(name)) + + def metadata_isdir(self, name: str) -> bool: + return False + + def metadata_listdir(self, name: str) -> List[str]: + return [] + + def run_script(self, script_name: str, namespace: str) -> None: + pass diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/setuptools_build.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/setuptools_build.py new file mode 100644 index 0000000..9d65ceb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/setuptools_build.py @@ -0,0 +1,167 @@ +import sys +from typing import List, Optional, Sequence + +# Shim to wrap setup.py invocation with setuptools +# +# We set sys.argv[0] to the path to the underlying setup.py file so +# setuptools / distutils don't take the path to the setup.py to be "-c" when +# invoking via the shim. This avoids e.g. the following manifest_maker +# warning: "warning: manifest_maker: standard file '-c' not found". +_SETUPTOOLS_SHIM = ( + "import io, os, sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};" + "f = getattr(tokenize, 'open', open)(__file__) " + "if os.path.exists(__file__) " + "else io.StringIO('from setuptools import setup; setup()');" + "code = f.read().replace('\\r\\n', '\\n');" + "f.close();" + "exec(compile(code, __file__, 'exec'))" +) + + +def make_setuptools_shim_args( + setup_py_path: str, + global_options: Sequence[str] = None, + no_user_config: bool = False, + unbuffered_output: bool = False, +) -> List[str]: + """ + Get setuptools command arguments with shim wrapped setup file invocation. + + :param setup_py_path: The path to setup.py to be wrapped. + :param global_options: Additional global options. + :param no_user_config: If True, disables personal user configuration. + :param unbuffered_output: If True, adds the unbuffered switch to the + argument list. + """ + args = [sys.executable] + if unbuffered_output: + args += ["-u"] + args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] + if global_options: + args += global_options + if no_user_config: + args += ["--no-user-cfg"] + return args + + +def make_setuptools_bdist_wheel_args( + setup_py_path: str, + global_options: Sequence[str], + build_options: Sequence[str], + destination_dir: str, +) -> List[str]: + # NOTE: Eventually, we'd want to also -S to the flags here, when we're + # isolating. Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. 
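+ # Roughly, the assembled command is (illustrative):
+ # [sys.executable, "-u", "-c", <setuptools shim>, *global_options,
+ # "bdist_wheel", "-d", destination_dir, *build_options]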
+ args = make_setuptools_shim_args( + setup_py_path, global_options=global_options, unbuffered_output=True + ) + args += ["bdist_wheel", "-d", destination_dir] + args += build_options + return args + + +def make_setuptools_clean_args( + setup_py_path: str, + global_options: Sequence[str], +) -> List[str]: + args = make_setuptools_shim_args( + setup_py_path, global_options=global_options, unbuffered_output=True + ) + args += ["clean", "--all"] + return args + + +def make_setuptools_develop_args( + setup_py_path: str, + global_options: Sequence[str], + install_options: Sequence[str], + no_user_config: bool, + prefix: Optional[str], + home: Optional[str], + use_user_site: bool, +) -> List[str]: + assert not (use_user_site and prefix) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + ) + + args += ["develop", "--no-deps"] + + args += install_options + + if prefix: + args += ["--prefix", prefix] + if home is not None: + args += ["--install-dir", home] + + if use_user_site: + args += ["--user", "--prefix="] + + return args + + +def make_setuptools_egg_info_args( + setup_py_path: str, + egg_info_dir: Optional[str], + no_user_config: bool, +) -> List[str]: + args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config) + + args += ["egg_info"] + + if egg_info_dir: + args += ["--egg-base", egg_info_dir] + + return args + + +def make_setuptools_install_args( + setup_py_path: str, + global_options: Sequence[str], + install_options: Sequence[str], + record_filename: str, + root: Optional[str], + prefix: Optional[str], + header_dir: Optional[str], + home: Optional[str], + use_user_site: bool, + no_user_config: bool, + pycompile: bool, +) -> List[str]: + assert not (use_user_site and prefix) + assert not (use_user_site and root) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + unbuffered_output=True, + ) + args += ["install", "--record", record_filename] + args += ["--single-version-externally-managed"] + + if root is not None: + args += ["--root", root] + if prefix is not None: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + if use_user_site: + args += ["--user", "--prefix="] + + if pycompile: + args += ["--compile"] + else: + args += ["--no-compile"] + + if header_dir: + args += ["--install-headers", header_dir] + + args += install_options + + return args diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py new file mode 100644 index 0000000..f6e8b21 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py @@ -0,0 +1,289 @@ +import logging +import os +import shlex +import subprocess +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterable, + List, + Mapping, + Optional, + Union, +) + +from pip._internal.cli.spinners import SpinnerInterface, open_spinner +from pip._internal.exceptions import InstallationSubprocessError +from pip._internal.utils.logging import VERBOSE, subprocess_logger +from pip._internal.utils.misc import HiddenText + +if TYPE_CHECKING: + # Literal was introduced in Python 3.8. + # + # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. 
+ from typing import Literal + +CommandArgs = List[Union[str, HiddenText]] + + +LOG_DIVIDER = "----------------------------------------" + + +def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs: + """ + Create a CommandArgs object. + """ + command_args: CommandArgs = [] + for arg in args: + # Check for list instead of CommandArgs since CommandArgs is + # only known during type-checking. + if isinstance(arg, list): + command_args.extend(arg) + else: + # Otherwise, arg is str or HiddenText. + command_args.append(arg) + + return command_args + + +def format_command_args(args: Union[List[str], CommandArgs]) -> str: + """ + Format command arguments for display. + """ + # For HiddenText arguments, display the redacted form by calling str(). + # Also, we don't apply str() to arguments that aren't HiddenText since + # this can trigger a UnicodeDecodeError in Python 2 if the argument + # has type unicode and includes a non-ascii character. (The type + # checker doesn't ensure the annotations are correct in all cases.) + return " ".join( + shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg) + for arg in args + ) + + +def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]: + """ + Return the arguments in their raw, unredacted form. + """ + return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args] + + +def make_subprocess_output_error( + cmd_args: Union[List[str], CommandArgs], + cwd: Optional[str], + lines: List[str], + exit_status: int, +) -> str: + """ + Create and return the error message to use to log a subprocess error + with command output. + + :param lines: A list of lines, each ending with a newline. + """ + command = format_command_args(cmd_args) + + # We know the joined output value ends in a newline. + output = "".join(lines) + msg = ( + # Use a unicode string to avoid "UnicodeEncodeError: 'ascii' + # codec can't encode character ..." in Python 2 when a format + # argument (e.g. `output`) has a non-ascii character. + "Command errored out with exit status {exit_status}:\n" + " command: {command_display}\n" + " cwd: {cwd_display}\n" + "Complete output ({line_count} lines):\n{output}{divider}" + ).format( + exit_status=exit_status, + command_display=command, + cwd_display=cwd, + line_count=len(lines), + output=output, + divider=LOG_DIVIDER, + ) + return msg + + +def call_subprocess( + cmd: Union[List[str], CommandArgs], + show_stdout: bool = False, + cwd: Optional[str] = None, + on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise", + extra_ok_returncodes: Optional[Iterable[int]] = None, + command_desc: Optional[str] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + unset_environ: Optional[Iterable[str]] = None, + spinner: Optional[SpinnerInterface] = None, + log_failed_cmd: Optional[bool] = True, + stdout_only: Optional[bool] = False, +) -> str: + """ + Args: + show_stdout: if true, use INFO to log the subprocess's stderr and + stdout streams. Otherwise, use DEBUG. Defaults to False. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + log_failed_cmd: if false, failed commands are not logged, only raised. + stdout_only: if true, return only stdout, else return both. 
When true, + logging of both stdout and stderr occurs when the subprocess has + terminated, else logging occurs as subprocess output is produced. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + if unset_environ is None: + unset_environ = [] + # Most places in pip use show_stdout=False. What this means is-- + # + # - We connect the child's output (combined stderr and stdout) to a + # single pipe, which we read. + # - We log this output to stderr at DEBUG level as it is received. + # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't + # requested), then we show a spinner so the user can still see the + # subprocess is in progress. + # - If the subprocess exits with an error, we log the output to stderr + # at ERROR level if it hasn't already been displayed to the console + # (e.g. if --verbose logging wasn't enabled). This way we don't log + # the output to the console twice. + # + # If show_stdout=True, then the above is still done, but with DEBUG + # replaced by INFO. + if show_stdout: + # Then log the subprocess output at INFO level. + log_subprocess = subprocess_logger.info + used_level = logging.INFO + else: + # Then log the subprocess output using VERBOSE. This also ensures + # it will be logged to the log file (aka user_log), if enabled. + log_subprocess = subprocess_logger.verbose + used_level = VERBOSE + + # Whether the subprocess will be visible in the console. + showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level + + # Only use the spinner if we're not showing the subprocess output + # and we have a spinner. + use_spinner = not showing_subprocess and spinner is not None + + if command_desc is None: + command_desc = format_command_args(cmd) + + log_subprocess("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. + reveal_command_args(cmd), + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE, + cwd=cwd, + env=env, + errors="backslashreplace", + ) + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", + exc, + command_desc, + ) + raise + all_output = [] + if not stdout_only: + assert proc.stdout + assert proc.stdin + proc.stdin.close() + # In this mode, stdout and stderr are in the same pipe. + while True: + line: str = proc.stdout.readline() + if not line: + break + line = line.rstrip() + all_output.append(line + "\n") + + # Show the line immediately. + log_subprocess(line) + # Update the spinner. + if use_spinner: + assert spinner + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + output = "".join(all_output) + else: + # In this mode, stdout and stderr are in different pipes. + # We must use communicate() which is the only safe way to read both. 
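+        # communicate() reads stdout and stderr concurrently, so neither
+        # pipe's OS buffer can fill up and deadlock the child while we are
+        # blocked reading the other one. As noted in the docstring, nothing
+        # is logged until the subprocess has terminated in this mode.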
+ out, err = proc.communicate() + # log line by line to preserve pip log indenting + for out_line in out.splitlines(): + log_subprocess(out_line) + all_output.append(out) + for err_line in err.splitlines(): + log_subprocess(err_line) + all_output.append(err) + output = out + + proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes + if use_spinner: + assert spinner + if proc_had_error: + spinner.finish("error") + else: + spinner.finish("done") + if proc_had_error: + if on_returncode == "raise": + if not showing_subprocess and log_failed_cmd: + # Then the subprocess streams haven't been logged to the + # console yet. + msg = make_subprocess_output_error( + cmd_args=cmd, + cwd=cwd, + lines=all_output, + exit_status=proc.returncode, + ) + subprocess_logger.error(msg) + raise InstallationSubprocessError(proc.returncode, command_desc) + elif on_returncode == "warn": + subprocess_logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, + proc.returncode, + cwd, + ) + elif on_returncode == "ignore": + pass + else: + raise ValueError(f"Invalid value: on_returncode={on_returncode!r}") + return output + + +def runner_with_spinner_message(message: str) -> Callable[..., None]: + """Provide a subprocess_runner that shows a spinner message. + + Intended for use with for pep517's Pep517HookCaller. Thus, the runner has + an API that matches what's expected by Pep517HookCaller.subprocess_runner. + """ + + def runner( + cmd: List[str], + cwd: Optional[str] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + ) -> None: + with open_spinner(message) as spinner: + call_subprocess( + cmd, + cwd=cwd, + extra_environ=extra_environ, + spinner=spinner, + ) + + return runner diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/temp_dir.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/temp_dir.py new file mode 100644 index 0000000..442679a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/temp_dir.py @@ -0,0 +1,246 @@ +import errno +import itertools +import logging +import os.path +import tempfile +from contextlib import ExitStack, contextmanager +from typing import Any, Dict, Iterator, Optional, TypeVar, Union + +from pip._internal.utils.misc import enum, rmtree + +logger = logging.getLogger(__name__) + +_T = TypeVar("_T", bound="TempDirectory") + + +# Kinds of temporary directories. Only needed for ones that are +# globally-managed. +tempdir_kinds = enum( + BUILD_ENV="build-env", + EPHEM_WHEEL_CACHE="ephem-wheel-cache", + REQ_BUILD="req-build", +) + + +_tempdir_manager: Optional[ExitStack] = None + + +@contextmanager +def global_tempdir_manager() -> Iterator[None]: + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry: + """Manages temp directory behavior""" + + def __init__(self) -> None: + self._should_delete: Dict[str, bool] = {} + + def set_delete(self, kind: str, value: bool) -> None: + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. + """ + self._should_delete[kind] = value + + def get_delete(self, kind: str) -> bool: + """Get configured auto-delete flag for a given TempDirectory type, + default True. 
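+
+        Illustrative example: after ``registry.set_delete("req-build", False)``
+        (using the "req-build" kind defined above), this returns False for that
+        kind, while unconfigured kinds still default to True.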
+ """ + return self._should_delete.get(kind, True) + + +_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None + + +@contextmanager +def tempdir_registry() -> Iterator[TempDirectoryTypeRegistry]: + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + +class _Default: + pass + + +_default = _Default() + + +class TempDirectory: + """Helper class that owns and cleans up a temporary directory. + + This class can be used as a context manager or as an OO representation of a + temporary directory. + + Attributes: + path + Location to the created temporary directory + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + Methods: + cleanup() + Deletes the temporary directory + + When used as a context manager, if the delete attribute is True, on + exiting the context the temporary directory is deleted. + """ + + def __init__( + self, + path: Optional[str] = None, + delete: Union[bool, None, _Default] = _default, + kind: str = "temp", + globally_managed: bool = False, + ): + super().__init__() + + if delete is _default: + if path is not None: + # If we were given an explicit directory, resolve delete option + # now. + delete = False + else: + # Otherwise, we wait until cleanup and see what + # tempdir_registry says. + delete = None + + # The only time we specify path is in for editables where it + # is the value of the --src option. + if path is None: + path = self._create(kind) + + self._path = path + self._deleted = False + self.delete = delete + self.kind = kind + + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + @property + def path(self) -> str: + assert not self._deleted, f"Attempted to access deleted path: {self._path}" + return self._path + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.path!r}>" + + def __enter__(self: _T) -> _T: + return self + + def __exit__(self, exc: Any, value: Any, tb: Any) -> None: + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: + self.cleanup() + + def _create(self, kind: str) -> str: + """Create a temporary directory and store its path in self.path""" + # We realpath here because some systems have their default tmpdir + # symlinked to another directory. This tends to confuse build + # scripts, so we canonicalize the path by traversing potential + # symlinks here. + path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + logger.debug("Created temporary directory: %s", path) + return path + + def cleanup(self) -> None: + """Remove the temporary directory created and reset state""" + self._deleted = True + if not os.path.exists(self._path): + return + rmtree(self._path) + + +class AdjacentTempDirectory(TempDirectory): + """Helper class that creates a temporary directory adjacent to a real one. + + Attributes: + original + The original directory to create a temp directory for. + path + After calling create() or entering, contains the full + path to the temporary directory. 
+ delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + """ + + # The characters that may be used to name the temp directory + # We always prepend a ~ and then rotate through these until + # a usable name is found. + # pkg_resources raises a different error for .dist-info folder + # with leading '-' and invalid metadata + LEADING_CHARS = "-~.=%0123456789" + + def __init__(self, original: str, delete: Optional[bool] = None) -> None: + self.original = original.rstrip("/\\") + super().__init__(delete=delete) + + @classmethod + def _generate_names(cls, name: str) -> Iterator[str]: + """Generates a series of temporary names. + + The algorithm replaces the leading characters in the name + with ones that are valid filesystem characters, but are not + valid package names (for both Python and pip definitions of + package). + """ + for i in range(1, len(name)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i - 1 + ): + new_name = "~" + "".join(candidate) + name[i:] + if new_name != name: + yield new_name + + # If we make it this far, we will have to make a longer name + for i in range(len(cls.LEADING_CHARS)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i + ): + new_name = "~" + "".join(candidate) + name + if new_name != name: + yield new_name + + def _create(self, kind: str) -> str: + root, name = os.path.split(self.original) + for candidate in self._generate_names(name): + path = os.path.join(root, candidate) + try: + os.mkdir(path) + except OSError as ex: + # Continue if the name exists already + if ex.errno != errno.EEXIST: + raise + else: + path = os.path.realpath(path) + break + else: + # Final fallback on the default behavior. + path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + + logger.debug("Created temporary directory: %s", path) + return path diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/unpacking.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/unpacking.py new file mode 100644 index 0000000..9e6ed97 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/unpacking.py @@ -0,0 +1,351 @@ +"""Utilities related archives. 
+""" + +import logging +import os +import shutil +import stat +import sys +import tarfile +import zipfile +from typing import Iterable, List, Optional +from zipfile import ZipInfo + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.filetypes import ( + BZ2_EXTENSIONS, + TAR_EXTENSIONS, + XZ_EXTENSIONS, + ZIP_EXTENSIONS, +) +from pip._internal.utils.misc import ensure_dir + +logger = logging.getLogger(__name__) + + +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + +try: + import bz2 # noqa + + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug("bz2 module is not available") + +try: + # Only for Python 3.3+ + import lzma # noqa + + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug("lzma module is not available") + + +def current_umask() -> int: + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def split_leading_dir(path: str) -> List[str]: + path = path.lstrip("/").lstrip("\\") + if "/" in path and ( + ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path + ): + return path.split("/", 1) + elif "\\" in path: + return path.split("\\", 1) + else: + return [path, ""] + + +def has_leading_dir(paths: Iterable[str]) -> bool: + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def is_within_directory(directory: str, target: str) -> bool: + """ + Return true if the absolute path of target is within the directory + """ + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + return prefix == abs_directory + + +def _get_default_mode_plus_executable() -> int: + return 0o777 & ~current_umask() | 0o111 + + +def set_extracted_file_to_default_mode_plus_executable(path: str) -> None: + """ + Make file present at path have execute for user/group/world + (chmod +x) is no-op on windows per python docs + """ + os.chmod(path, _get_default_mode_plus_executable()) + + +def zip_item_is_executable(info: ZipInfo) -> bool: + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + return bool(mode and stat.S_ISREG(mode) and mode & 0o111) + + +def unzip_file(filename: str, location: str, flatten: bool = True) -> None: + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + zipfp = open(filename, "rb") + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not is_within_directory(location, fn): + message = ( + "The zip file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, fn, location)) + if fn.endswith("/") or fn.endswith("\\"): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + # Don't use read() to avoid allocating an arbitrarily large + # chunk of memory for the file's content + fp = zip.open(name) + try: + with open(fn, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + finally: + fp.close() + if zip_item_is_executable(info): + set_extracted_file_to_default_mode_plus_executable(fn) + finally: + zipfp.close() + + +def untar_file(filename: str, location: str) -> None: + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied on top of the + default. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"): + mode = "r:gz" + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = "r:bz2" + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = "r:xz" + elif filename.lower().endswith(".tar"): + mode = "r" + else: + logger.warning( + "Cannot determine compression type for file %s", + filename, + ) + mode = "r:*" + tar = tarfile.open(filename, mode, encoding="utf-8") + try: + leading = has_leading_dir([member.name for member in tar.getmembers()]) + + # PEP 706 added `tarfile.data_filter`, and made some other changes to + # Python's tarfile module (see below). The features were backported to + # security releases. + try: + data_filter = tarfile.data_filter + except AttributeError: + _untar_without_filter(filename, location, tar, leading) + else: + default_mode_plus_executable = _get_default_mode_plus_executable() + + def pip_filter(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo: + if leading: + member.name = split_leading_dir(member.name)[1] + orig_mode = member.mode + try: + try: + member = data_filter(member, location) + except tarfile.LinkOutsideDestinationError: + if sys.version_info[:3] in { + (3, 8, 17), + (3, 9, 17), + (3, 10, 12), + (3, 11, 4), + }: + # The tarfile filter in specific Python versions + # raises LinkOutsideDestinationError on valid input + # (https://github.com/python/cpython/issues/107845) + # Ignore the error there, but do use the + # more lax `tar_filter` + member = tarfile.tar_filter(member, location) + else: + raise + except tarfile.TarError as exc: + message = "Invalid member in the tar file {}: {}" + # Filter error messages mention the member name. + # No need to add it here. + raise InstallationError( + message.format( + filename, + exc, + ) + ) + if member.isfile() and orig_mode & 0o111: + member.mode = default_mode_plus_executable + else: + # See PEP 706 note above. + # The PEP changed this from `int` to `Optional[int]`, + # where None means "use the default". Mypy doesn't + # know this yet. 
+ member.mode = None # type: ignore [assignment] + return member + + tar.extractall(location, filter=pip_filter) + + finally: + tar.close() + + +def is_symlink_target_in_tar(tar: tarfile.TarFile, tarinfo: tarfile.TarInfo) -> bool: + """Check if the file pointed to by the symbolic link is in the tar archive""" + linkname = os.path.join(os.path.dirname(tarinfo.name), tarinfo.linkname) + + linkname = os.path.normpath(linkname) + linkname = linkname.replace("\\", "/") + + try: + tar.getmember(linkname) + return True + except KeyError: + return False + + +def _untar_without_filter( + filename: str, + location: str, + tar: tarfile.TarFile, + leading: bool, +) -> None: + """Fallback for Python without tarfile.data_filter""" + # NOTE: This function can be removed once pip requires CPython ≥ 3.12.​ + # PEP 706 added tarfile.data_filter, made tarfile extraction operations more secure. + # This feature is fully supported from CPython 3.12 onward. + for member in tar.getmembers(): + fn = member.name + if leading: + fn = split_leading_dir(fn)[1] + path = os.path.join(location, fn) + if not is_within_directory(location, path): + message = ( + "The tar file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, path, location)) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + if not is_symlink_target_in_tar(tar, member): + message = ( + "The tar file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError( + message.format(filename, member.name, member.linkname) + ) + try: + tar._extract_member(member, path) + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + ensure_dir(os.path.dirname(path)) + assert fp is not None + with open(path, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + tar.utime(member, path) + # member have any execute permissions for user/group/world? + if member.mode & 0o111: + set_extracted_file_to_default_mode_plus_executable(path) + + +def unpack_file( + filename: str, + location: str, + content_type: Optional[str] = None, +) -> None: + filename = os.path.realpath(filename) + if ( + content_type == "application/zip" + or filename.lower().endswith(ZIP_EXTENSIONS) + or zipfile.is_zipfile(filename) + ): + unzip_file(filename, location, flatten=not filename.endswith(".whl")) + elif ( + content_type == "application/x-gzip" + or tarfile.is_tarfile(filename) + or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS) + ): + untar_file(filename, location) + else: + # FIXME: handle? + # FIXME: magic signatures? 
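+        # Reaching this branch means neither the content type, the file
+        # extension, nor zipfile/tarfile sniffing identified the archive,
+        # so fail loudly rather than guess a format.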
+ logger.critical( + "Cannot unpack file %s (downloaded from %s, content-type: %s); " + "cannot detect archive format", + filename, + location, + content_type, + ) + raise InstallationError(f"Cannot determine archive format of {location}") diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/urls.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/urls.py new file mode 100644 index 0000000..6ba2e04 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/urls.py @@ -0,0 +1,62 @@ +import os +import string +import urllib.parse +import urllib.request +from typing import Optional + +from .compat import WINDOWS + + +def get_url_scheme(url: str) -> Optional[str]: + if ":" not in url: + return None + return url.split(":", 1)[0].lower() + + +def path_to_url(path: str) -> str: + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. + """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path)) + return url + + +def url_to_path(url: str) -> str: + """ + Convert a file: URL to a path. + """ + assert url.startswith( + "file:" + ), f"You can only turn file: urls into filenames (not {url!r})" + + _, netloc, path, _, _ = urllib.parse.urlsplit(url) + + if not netloc or netloc == "localhost": + # According to RFC 8089, same as empty authority. + netloc = "" + elif WINDOWS: + # If we have a UNC path, prepend UNC share notation. + netloc = "\\\\" + netloc + else: + raise ValueError( + f"non-local file URIs are not supported on this platform: {url!r}" + ) + + path = urllib.request.url2pathname(netloc + path) + + # On Windows, urlsplit parses the path as something like "/C:/Users/foo". + # This creates issues for path-related functions like io.open(), so we try + # to detect and strip the leading slash. + if ( + WINDOWS + and not netloc # Not UNC. + and len(path) >= 3 + and path[0] == "/" # Leading slash to strip. + and path[1] in string.ascii_letters # Drive letter. + and path[2:4] in (":", ":/") # Colon + end of string, or colon + absolute path. + ): + path = path[1:] + + return path diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/virtualenv.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/virtualenv.py new file mode 100644 index 0000000..c926db4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/virtualenv.py @@ -0,0 +1,104 @@ +import logging +import os +import re +import site +import sys +from typing import List, Optional + +logger = logging.getLogger(__name__) +_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile( + r"include-system-site-packages\s*=\s*(?Ptrue|false)" +) + + +def _running_under_venv() -> bool: + """Checks if sys.base_prefix and sys.prefix match. + + This handles PEP 405 compliant virtual environments. + """ + return sys.prefix != getattr(sys, "base_prefix", sys.prefix) + + +def _running_under_regular_virtualenv() -> bool: + """Checks if sys.real_prefix is set. + + This handles virtual environments created with pypa's virtualenv. + """ + # pypa/virtualenv case + return hasattr(sys, "real_prefix") + + +def running_under_virtualenv() -> bool: + """Return True if we're running inside a virtualenv, False otherwise.""" + return _running_under_venv() or _running_under_regular_virtualenv() + + +def _get_pyvenv_cfg_lines() -> Optional[List[str]]: + """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines + + Returns None, if it could not read/access the file. 
+ """ + pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg") + try: + # Although PEP 405 does not specify, the built-in venv module always + # writes with UTF-8. (pypa/pip#8717) + with open(pyvenv_cfg_file, encoding="utf-8") as f: + return f.read().splitlines() # avoids trailing newlines + except OSError: + return None + + +def _no_global_under_venv() -> bool: + """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion + + PEP 405 specifies that when system site-packages are not supposed to be + visible from a virtual environment, `pyvenv.cfg` must contain the following + line: + + include-system-site-packages = false + + Additionally, log a warning if accessing the file fails. + """ + cfg_lines = _get_pyvenv_cfg_lines() + if cfg_lines is None: + # We're not in a "sane" venv, so assume there is no system + # site-packages access (since that's PEP 405's default state). + logger.warning( + "Could not access 'pyvenv.cfg' despite a virtual environment " + "being active. Assuming global site-packages is not accessible " + "in this environment." + ) + return True + + for line in cfg_lines: + match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) + if match is not None and match.group("value") == "false": + return True + return False + + +def _no_global_under_regular_virtualenv() -> bool: + """Check if "no-global-site-packages.txt" exists beside site.py + + This mirrors logic in pypa/virtualenv for determining whether system + site-packages are visible in the virtual environment. + """ + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_site_packages_file = os.path.join( + site_mod_dir, + "no-global-site-packages.txt", + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global() -> bool: + """Returns a boolean, whether running in venv with no system site-packages.""" + # PEP 405 compliance needs to be checked first since virtualenv >=20 would + # return True for both checks, but is only able to use the PEP 405 config. + if _running_under_venv(): + return _no_global_under_venv() + + if _running_under_regular_virtualenv(): + return _no_global_under_regular_virtualenv() + + return False diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/utils/wheel.py b/.venv/lib/python3.9/site-packages/pip/_internal/utils/wheel.py new file mode 100644 index 0000000..03f00e4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/utils/wheel.py @@ -0,0 +1,182 @@ +"""Support functions for working with wheel files. +""" + +import logging +from email.message import Message +from email.parser import Parser +from typing import Dict, Tuple +from zipfile import BadZipFile, ZipFile + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import DistInfoDistribution, Distribution + +from pip._internal.exceptions import UnsupportedWheel +from pip._internal.utils.pkg_resources import DictMetadata + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +class WheelMetadata(DictMetadata): + """Metadata provider that maps metadata decoding exceptions to our + internal exception type. + """ + + def __init__(self, metadata: Dict[str, bytes], wheel_name: str) -> None: + super().__init__(metadata) + self._wheel_name = wheel_name + + def get_metadata(self, name: str) -> str: + try: + return super().get_metadata(name) + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. 
+ raise UnsupportedWheel( + f"Error decoding metadata for {self._wheel_name}: {e}" + ) + + +def pkg_resources_distribution_for_wheel( + wheel_zip: ZipFile, name: str, location: str +) -> Distribution: + """Get a pkg_resources distribution given a wheel. + + :raises UnsupportedWheel: on any errors + """ + info_dir, _ = parse_wheel(wheel_zip, name) + + metadata_files = [p for p in wheel_zip.namelist() if p.startswith(f"{info_dir}/")] + + metadata_text: Dict[str, bytes] = {} + for path in metadata_files: + _, metadata_name = path.split("/", 1) + + try: + metadata_text[metadata_name] = read_wheel_metadata_file(wheel_zip, path) + except UnsupportedWheel as e: + raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e))) + + metadata = WheelMetadata(metadata_text, location) + + return DistInfoDistribution(location=location, metadata=metadata, project_name=name) + + +def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]: + """Extract information from the provided wheel, ensuring it meets basic + standards. + + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e))) + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source: ZipFile, name: str) -> str: + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. + """ + # Zip file path separators must be / + subdirs = {p.split("/", 1)[0] for p in source.namelist()} + + info_dirs = [s for s in subdirs if s.endswith(".dist-info")] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format(", ".join(info_dirs)) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + ".dist-info directory {!r} does not start with {!r}".format( + info_dir, canonical_name + ) + ) + + return info_dir + + +def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes: + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel(f"could not read {path!r} file: {e!r}") + + +def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message: + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = f"{dist_info_dir}/WHEEL" + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = wheel_contents.decode() + except UnicodeDecodeError as e: + raise UnsupportedWheel(f"error decoding {path!r}: {e!r}") + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data: Message) -> Tuple[int, ...]: + """Given WHEEL metadata, return the parsed Wheel-Version. 
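+    For example, a ``Wheel-Version: 1.0`` header parses to ``(1, 0)``.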
+ Otherwise, raise UnsupportedWheel. + """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split("."))) + except ValueError: + raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}") + + +def check_compatibility(version: Tuple[int, ...], name: str) -> None: + """Raises errors or warns if called with an incompatible Wheel-Version. + + pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). + + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "{}'s Wheel-Version ({}) is not compatible with this version " + "of pip".format(name, ".".join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + "Installing from a newer Wheel-Version (%s)", + ".".join(map(str, version)), + ) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__init__.py b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__init__.py new file mode 100644 index 0000000..b6beddb --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__init__.py @@ -0,0 +1,15 @@ +# Expose a limited set of classes and functions so callers outside of +# the vcs package don't need to import deeper than `pip._internal.vcs`. +# (The test directory may still need to import from a vcs sub-package.) +# Import all vcs modules to register each VCS in the VcsSupport object. 
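+# Each module calls vcs.register(...) at import time, so once these imports
+# have run, callers can look a backend up by name, e.g. vcs.get_backend("git")
+# (illustrative; the lookup helpers live in versioncontrol.py).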
+import pip._internal.vcs.bazaar +import pip._internal.vcs.git +import pip._internal.vcs.mercurial +import pip._internal.vcs.subversion # noqa: F401 +from pip._internal.vcs.versioncontrol import ( # noqa: F401 + RemoteNotFoundError, + RemoteNotValidError, + is_url, + make_vcs_requirement_url, + vcs, +) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..c5a032e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc new file mode 100644 index 0000000..5b32e85 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc new file mode 100644 index 0000000..b3130c2 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc new file mode 100644 index 0000000..cd7a014 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc new file mode 100644 index 0000000..9948802 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc new file mode 100644 index 0000000..e1e03c8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/bazaar.py b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/bazaar.py new file mode 100644 index 0000000..82e7595 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/bazaar.py @@ -0,0 +1,93 @@ +import logging +from typing import List, Optional, Tuple + +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = "bzr" + dirname = ".bzr" + repo_name = "branch" + schemes = ( + "bzr+http", + "bzr+https", + "bzr+ssh", + "bzr+sftp", + "bzr+ftp", + "bzr+lp", + "bzr+file", + ) + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return ["-r", rev] + + def fetch_new(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + rev_display = 
rev_options.to_display() + logger.info( + "Checking out %s%s to %s", + url, + rev_display, + display_path(dest), + ) + cmd_args = make_command("branch", "-q", rev_options.to_args(), url, dest) + self.run_command(cmd_args) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command(make_command("switch", url), cwd=dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + cmd_args = make_command("pull", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + url, rev, user_pass = super().get_url_rev_and_auth(url) + if url.startswith("ssh://"): + url = "bzr+" + url + return url, rev, user_pass + + @classmethod + def get_remote_url(cls, location: str) -> str: + urls = cls.run_command( + ["info"], show_stdout=False, stdout_only=True, cwd=location + ) + for line in urls.splitlines(): + line = line.strip() + for x in ("checkout of branch: ", "parent branch: "): + if line.startswith(x): + repo = line.split(x)[1] + if cls._is_local_repository(repo): + return path_to_url(repo) + return repo + raise RemoteNotFoundError + + @classmethod + def get_revision(cls, location: str) -> str: + revision = cls.run_command( + ["revno"], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return revision.splitlines()[-1] + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/git.py b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/git.py new file mode 100644 index 0000000..7a78ad1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/git.py @@ -0,0 +1,513 @@ +import logging +import os.path +import pathlib +import re +import urllib.parse +import urllib.request +from typing import List, Optional, Tuple + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path, hide_url +from pip._internal.utils.subprocess import make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RemoteNotValidError, + RevOptions, + VersionControl, + find_path_to_project_root_from_repo_root, + vcs, +) + +urlsplit = urllib.parse.urlsplit +urlunsplit = urllib.parse.urlunsplit + + +logger = logging.getLogger(__name__) + + +GIT_VERSION_REGEX = re.compile( + r"^git version " # Prefix. + r"(\d+)" # Major. + r"\.(\d+)" # Dot, minor. + r"(?:\.(\d+))?" # Optional dot, patch. + r".*$" # Suffix, including any pre- and post-release segments we don't care about. +) + +HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$") + +# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git' +SCP_REGEX = re.compile( + r"""^ + # Optional user, e.g. 'git@' + (\w+@)? + # Server, e.g. 'github.com'. + ([^/:]+): + # The server-side path. e.g. 'user/project.git'. Must start with an + # alphanumeric character so as not to be confusable with a Windows paths + # like 'C:/foo/bar' or 'C:\foo\bar'. 
+ (\w[^:]*) + $""", + re.VERBOSE, +) + + +def looks_like_hash(sha: str) -> bool: + return bool(HASH_REGEX.match(sha)) + + +class Git(VersionControl): + name = "git" + dirname = ".git" + repo_name = "clone" + schemes = ( + "git+http", + "git+https", + "git+ssh", + "git+git", + "git+file", + ) + # Prevent the user's environment variables from interfering with pip: + # https://github.com/pypa/pip/issues/1130 + unset_environ = ("GIT_DIR", "GIT_WORK_TREE") + default_arg_rev = "HEAD" + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return [rev] + + def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0]) + return not is_tag_or_branch + + def get_git_version(self) -> Tuple[int, ...]: + version = self.run_command(["version"], show_stdout=False, stdout_only=True) + match = GIT_VERSION_REGEX.match(version) + if not match: + logger.warning("Can't parse git version: %s", version) + return () + return tuple(int(c) for c in match.groups()) + + @classmethod + def get_current_branch(cls, location: str) -> Optional[str]: + """ + Return the current branch, or None if HEAD isn't at a branch + (e.g. detached HEAD). + """ + # git-symbolic-ref exits with empty stdout if "HEAD" is a detached + # HEAD rather than a symbolic ref. In addition, the -q causes the + # command to exit with status code 1 instead of 128 in this case + # and to suppress the message to stderr. + args = ["symbolic-ref", "-q", "HEAD"] + output = cls.run_command( + args, + extra_ok_returncodes=(1,), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + ref = output.strip() + + if ref.startswith("refs/heads/"): + return ref[len("refs/heads/") :] + + return None + + @classmethod + def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]: + """ + Return (sha_or_none, is_branch), where sha_or_none is a commit hash + if the revision names a remote branch or tag, otherwise None. + + Args: + dest: the repository directory. + rev: the revision name. + """ + # Pass rev to pre-filter the list. + output = cls.run_command( + ["show-ref", rev], + cwd=dest, + show_stdout=False, + stdout_only=True, + on_returncode="ignore", + ) + refs = {} + # NOTE: We do not use splitlines here since that would split on other + # unicode separators, which can be maliciously used to install a + # different revision. + for line in output.strip().split("\n"): + line = line.rstrip("\r") + if not line: + continue + try: + ref_sha, ref_name = line.split(" ", maxsplit=2) + except ValueError: + # Include the offending line to simplify troubleshooting if + # this error ever occurs. 
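+                # A well-formed show-ref line looks like (illustrative):
+                #   "<40-hex sha> refs/remotes/origin/main"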
+ raise ValueError(f"unexpected show-ref line: {line!r}") + + refs[ref_name] = ref_sha + + branch_ref = f"refs/remotes/origin/{rev}" + tag_ref = f"refs/tags/{rev}" + + sha = refs.get(branch_ref) + if sha is not None: + return (sha, True) + + sha = refs.get(tag_ref) + + return (sha, False) + + @classmethod + def _should_fetch(cls, dest: str, rev: str) -> bool: + """ + Return true if rev is a ref or is a commit that we don't have locally. + + Branches and tags are not considered in this method because they are + assumed to be always available locally (which is a normal outcome of + ``git clone`` and ``git fetch --tags``). + """ + if rev.startswith("refs/"): + # Always fetch remote refs. + return True + + if not looks_like_hash(rev): + # Git fetch would fail with abbreviated commits. + return False + + if cls.has_commit(dest, rev): + # Don't fetch if we have the commit locally. + return False + + return True + + @classmethod + def resolve_revision( + cls, dest: str, url: HiddenText, rev_options: RevOptions + ) -> RevOptions: + """ + Resolve a revision to a new RevOptions object with the SHA1 of the + branch, tag, or ref if found. + + Args: + rev_options: a RevOptions object. + """ + rev = rev_options.arg_rev + # The arg_rev property's implementation for Git ensures that the + # rev return value is always non-None. + assert rev is not None + + sha, is_branch = cls.get_revision_sha(dest, rev) + + if sha is not None: + rev_options = rev_options.make_new(sha) + rev_options.branch_name = rev if is_branch else None + + return rev_options + + # Do not show a warning for the common case of something that has + # the form of a Git commit hash. + if not looks_like_hash(rev): + logger.warning( + "Did not find branch or tag '%s', assuming revision or ref.", + rev, + ) + + if not cls._should_fetch(dest, rev): + return rev_options + + # fetch the requested revision + cls.run_command( + make_command("fetch", "-q", url, rev_options.to_args()), + cwd=dest, + ) + # Change the revision to the SHA of the ref we fetched + sha = cls.get_revision(dest, rev="FETCH_HEAD") + rev_options = rev_options.make_new(sha) + + return rev_options + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """ + Return whether the current commit hash equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + if not name: + # Then avoid an unnecessary subprocess call. + return False + + return cls.get_revision(dest) == name + + def fetch_new(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + rev_display = rev_options.to_display() + logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest)) + if self.get_git_version() >= (2, 17): + # Git added support for partial clone in 2.17 + # https://git-scm.com/docs/partial-clone + # Speeds up cloning by functioning without a complete copy of repository + self.run_command( + make_command( + "clone", + "--filter=blob:none", + "-q", + url, + dest, + ) + ) + else: + self.run_command(make_command("clone", "-q", url, dest)) + + if rev_options.rev: + # Then a specific revision was requested. + rev_options = self.resolve_revision(dest, url, rev_options) + branch_name = getattr(rev_options, "branch_name", None) + logger.debug("Rev options %s, branch_name %s", rev_options, branch_name) + if branch_name is None: + # Only do a checkout if the current commit id doesn't match + # the requested revision. 
+ if not self.is_commit_id_equal(dest, rev_options.rev): + cmd_args = make_command( + "checkout", + "-q", + rev_options.to_args(), + ) + self.run_command(cmd_args, cwd=dest) + elif self.get_current_branch(dest) != branch_name: + # Then a specific branch was requested, and that branch + # is not yet checked out. + track_branch = f"origin/{branch_name}" + cmd_args = [ + "checkout", + "-b", + branch_name, + "--track", + track_branch, + ] + self.run_command(cmd_args, cwd=dest) + else: + sha = self.get_revision(dest) + rev_options = rev_options.make_new(sha) + + logger.info("Resolved %s to commit %s", url, rev_options.rev) + + #: repo may contain submodules + self.update_submodules(dest) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command( + make_command("config", "remote.origin.url", url), + cwd=dest, + ) + cmd_args = make_command("checkout", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + self.update_submodules(dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + # First fetch changes from the default remote + if self.get_git_version() >= (1, 9): + # fetch tags in addition to everything else + self.run_command(["fetch", "-q", "--tags"], cwd=dest) + else: + self.run_command(["fetch", "-q"], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + rev_options = self.resolve_revision(dest, url, rev_options) + cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + #: update submodules + self.update_submodules(dest) + + @classmethod + def get_remote_url(cls, location: str) -> str: + """ + Return URL of the first remote encountered. + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + # We need to pass 1 for extra_ok_returncodes since the command + # exits with return code 1 if there are no matching lines. + stdout = cls.run_command( + ["config", "--get-regexp", r"remote\..*\.url"], + extra_ok_returncodes=(1,), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + remotes = stdout.splitlines() + try: + found_remote = remotes[0] + except IndexError: + raise RemoteNotFoundError + + for remote in remotes: + if remote.startswith("remote.origin.url "): + found_remote = remote + break + url = found_remote.split(" ")[1] + return cls._git_remote_to_pip_url(url.strip()) + + @staticmethod + def _git_remote_to_pip_url(url: str) -> str: + """ + Convert a remote url from what git uses to what pip accepts. + + There are 3 legal forms **url** may take: + + 1. A fully qualified url: ssh://git@example.com/foo/bar.git + 2. A local project.git folder: /path/to/bare/repository.git + 3. SCP shorthand for form 1: git@example.com:foo/bar.git + + Form 1 is output as-is. Form 2 must be converted to URI and form 3 must + be converted to form 1. + + See the corresponding test test_git_remote_url_to_pip() for examples of + sample inputs/outputs. + """ + if re.match(r"\w+://", url): + # This is already valid. Pass it though as-is. + return url + if os.path.exists(url): + # A local bare remote (git clone --mirror). + # Needs a file:// prefix. + return pathlib.PurePath(url).as_uri() + scp_match = SCP_REGEX.match(url) + if scp_match: + # Add an ssh:// prefix and replace the ':' with a '/'. + return scp_match.expand(r"ssh://\1\2/\3") + # Otherwise, bail out. 
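+        # (Illustrative of the SCP branch above: 'git@example.com:foo/bar.git'
+        # becomes 'ssh://git@example.com/foo/bar.git'; anything that falls
+        # through to this point is rejected as not a usable remote.)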
+ raise RemoteNotValidError(url) + + @classmethod + def has_commit(cls, location: str, rev: str) -> bool: + """ + Check if rev is a commit that is available in the local repository. + """ + try: + cls.run_command( + ["rev-parse", "-q", "--verify", "sha^" + rev], + cwd=location, + log_failed_cmd=False, + ) + except InstallationError: + return False + else: + return True + + @classmethod + def get_revision(cls, location: str, rev: Optional[str] = None) -> str: + if rev is None: + rev = "HEAD" + current_rev = cls.run_command( + ["rev-parse", rev], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return current_rev.strip() + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. + """ + # find the repo root + git_dir = cls.run_command( + ["rev-parse", "--git-dir"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if not os.path.isabs(git_dir): + git_dir = os.path.join(location, git_dir) + repo_root = os.path.abspath(os.path.join(git_dir, "..")) + return find_path_to_project_root_from_repo_root(location, repo_root) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + """ + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + That's required because although they use SSH they sometimes don't + work with a ssh:// scheme (e.g. GitHub). But we need a scheme for + parsing. Hence we remove it again afterwards and return it as a stub. + """ + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith("file"): + initial_slashes = path[: -len(path.lstrip("/"))] + newpath = initial_slashes + urllib.request.url2pathname(path).replace( + "\\", "/" + ).lstrip("/") + after_plus = scheme.find("+") + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + if "://" not in url: + assert "file:" not in url + url = url.replace("git+", "git+ssh://") + url, rev, user_pass = super().get_url_rev_and_auth(url) + url = url.replace("ssh://", "") + else: + url, rev, user_pass = super().get_url_rev_and_auth(url) + + return url, rev, user_pass + + @classmethod + def update_submodules(cls, location: str) -> None: + if not os.path.exists(os.path.join(location, ".gitmodules")): + return + cls.run_command( + ["submodule", "update", "--init", "--recursive", "-q"], + cwd=location, + ) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ["rev-parse", "--show-toplevel"], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode="raise", + log_failed_cmd=False, + ) + except BadCommand: + logger.debug( + "could not determine if %s is under git control " + "because git is not available", + location, + ) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip("\r\n")) + + @staticmethod + def should_add_vcs_url_prefix(repo_url: str) -> bool: + """In either https or ssh form, requirements must be prefixed with git+.""" + return True + + +vcs.register(Git) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/mercurial.py b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/mercurial.py new file mode 100644 index 
0000000..39a2e55 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/mercurial.py @@ -0,0 +1,153 @@ +import configparser +import logging +import os +from typing import List, Optional + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + RevOptions, + VersionControl, + find_path_to_project_root_from_repo_root, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = "hg" + dirname = ".hg" + repo_name = "clone" + schemes = ( + "hg+file", + "hg+http", + "hg+https", + "hg+ssh", + "hg+static-http", + ) + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return [f"--rev={rev}"] + + def fetch_new(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + rev_display = rev_options.to_display() + logger.info( + "Cloning hg %s%s to %s", + url, + rev_display, + display_path(dest), + ) + self.run_command(make_command("clone", "--noupdate", "-q", url, dest)) + self.run_command( + make_command("update", "-q", rev_options.to_args()), + cwd=dest, + ) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + repo_config = os.path.join(dest, self.dirname, "hgrc") + config = configparser.RawConfigParser() + try: + config.read(repo_config) + config.set("paths", "default", url.secret) + with open(repo_config, "w") as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning("Could not switch Mercurial repository to %s: %s", url, exc) + else: + cmd_args = make_command("update", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command(["pull", "-q"], cwd=dest) + cmd_args = make_command("update", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_remote_url(cls, location: str) -> str: + url = cls.run_command( + ["showconfig", "paths.default"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if cls._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the repository-local changeset revision number, as an integer. + """ + current_revision = cls.run_command( + ["parents", "--template={rev}"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_revision + + @classmethod + def get_requirement_revision(cls, location: str) -> str: + """ + Return the changeset identification hash, as a 40-character + hexadecimal string + """ + current_rev_hash = cls.run_command( + ["parents", "--template={node}"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_rev_hash + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """Always assume the versions don't match""" + return False + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. 
+ """ + # find the repo root + repo_root = cls.run_command( + ["root"], show_stdout=False, stdout_only=True, cwd=location + ).strip() + if not os.path.isabs(repo_root): + repo_root = os.path.abspath(os.path.join(location, repo_root)) + return find_path_to_project_root_from_repo_root(location, repo_root) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ["root"], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode="raise", + log_failed_cmd=False, + ) + except BadCommand: + logger.debug( + "could not determine if %s is under hg control " + "because hg is not available", + location, + ) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip("\r\n")) + + +vcs.register(Mercurial) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/subversion.py b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/subversion.py new file mode 100644 index 0000000..b5b6fd5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/subversion.py @@ -0,0 +1,318 @@ +import logging +import os +import re +from typing import List, Optional, Tuple + +from pip._internal.utils.misc import ( + HiddenText, + display_path, + is_console_interactive, + is_installable_dir, + split_auth_from_netloc, +) +from pip._internal.utils.subprocess import CommandArgs, make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + +_svn_xml_url_re = re.compile('url="([^"]+)"') +_svn_rev_re = re.compile(r'committed-rev="(\d+)"') +_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') +_svn_info_xml_url_re = re.compile(r"(.*)") + + +class Subversion(VersionControl): + name = "svn" + dirname = ".svn" + repo_name = "checkout" + schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file") + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: + return True + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return ["-r", rev] + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, _ in os.walk(location): + if cls.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(cls.dirname) + entries_fn = os.path.join(base, cls.dirname, "entries") + if not os.path.exists(entries_fn): + # FIXME: should we warn? + continue + + dirurl, localrev = cls._get_svn_url_rev(base) + + if base == location: + assert dirurl is not None + base = dirurl + "/" # save the root url + elif not dirurl or not dirurl.startswith(base): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return str(revision) + + @classmethod + def get_netloc_and_auth( + cls, netloc: str, scheme: str + ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]: + """ + This override allows the auth information to be passed to svn via the + --username and --password options instead of via the URL. + """ + if scheme == "ssh": + # The --username and --password options can't be used for + # svn+ssh URLs, so keep the auth information in the URL. 
+ return super().get_netloc_and_auth(netloc, scheme) + + return split_auth_from_netloc(netloc) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it + url, rev, user_pass = super().get_url_rev_and_auth(url) + if url.startswith("ssh://"): + url = "svn+" + url + return url, rev, user_pass + + @staticmethod + def make_rev_args( + username: Optional[str], password: Optional[HiddenText] + ) -> CommandArgs: + extra_args: CommandArgs = [] + if username: + extra_args += ["--username", username] + if password: + extra_args += ["--password", password] + + return extra_args + + @classmethod + def get_remote_url(cls, location: str) -> str: + # In cases where the source is in a subdirectory, we have to look up in + # the location until we find a valid project root. + orig_location = location + while not is_installable_dir(location): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding a Python project. + logger.warning( + "Could not find Python project for directory %s (tried all " + "parent directories)", + orig_location, + ) + raise RemoteNotFoundError + + url, _rev = cls._get_svn_url_rev(location) + if url is None: + raise RemoteNotFoundError + + return url + + @classmethod + def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]: + from pip._internal.exceptions import InstallationError + + entries_path = os.path.join(location, cls.dirname, "entries") + if os.path.exists(entries_path): + with open(entries_path) as f: + data = f.read() + else: # subversion >= 1.7 does not have the 'entries' file + data = "" + + url = None + if data.startswith("8") or data.startswith("9") or data.startswith("10"): + entries = list(map(str.splitlines, data.split("\n\x0c\n"))) + del entries[0][0] # get rid of the '8' + url = entries[0][3] + revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0] + elif data.startswith("<?xml"): + match = _svn_xml_url_re.search(data) + if not match: + raise ValueError(f"Badly formatted data: {data!r}") + url = match.group(1) # get repository URL + revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] + else: + try: + # subversion >= 1.7 + # Note that using get_remote_call_options is not necessary here + # because `svn info` is being run against a local directory. + # We don't need to worry about making sure interactive mode + # is being used to prompt for passwords, because passwords + # are only potentially needed for remote server requests. + xml = cls.run_command( + ["info", "--xml", location], + show_stdout=False, + stdout_only=True, + ) + match = _svn_info_xml_url_re.search(xml) + assert match is not None + url = match.group(1) + revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)] + except InstallationError: + url, revs = None, [] + + if revs: + rev = max(revs) + else: + rev = 0 + + return url, rev + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """Always assume the versions don't match""" + return False + + def __init__(self, use_interactive: bool = None) -> None: + if use_interactive is None: + use_interactive = is_console_interactive() + self.use_interactive = use_interactive + + # This member is used to cache the fetched version of the current + # ``svn`` client. + # Special value definitions: + # None: Not evaluated yet. + # Empty tuple: Could not parse version. + self._vcs_version: Optional[Tuple[int, ...]] = None + + super().__init__() + + def call_vcs_version(self) -> Tuple[int, ...]: + """Query the version of the currently installed Subversion client.
+ + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + # Example versions: + # svn, version 1.10.3 (r1842928) + # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 + # svn, version 1.7.14 (r1542130) + # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu + # svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0) + # compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2 + version_prefix = "svn, version " + version = self.run_command(["--version"], show_stdout=False, stdout_only=True) + if not version.startswith(version_prefix): + return () + + version = version[len(version_prefix) :].split()[0] + version_list = version.partition("-")[0].split(".") + try: + parsed_version = tuple(map(int, version_list)) + except ValueError: + return () + + return parsed_version + + def get_vcs_version(self) -> Tuple[int, ...]: + """Return the version of the currently installed Subversion client. + + If the version of the Subversion client has already been queried, + a cached value will be used. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + if self._vcs_version is not None: + # Use cached version, if available. + # If parsing the version failed previously (empty tuple), + # do not attempt to parse it again. + return self._vcs_version + + vcs_version = self.call_vcs_version() + self._vcs_version = vcs_version + return vcs_version + + def get_remote_call_options(self) -> CommandArgs: + """Return options to be used on calls to Subversion that contact the server. + + These options are applicable for the following ``svn`` subcommands used + in this class. + + - checkout + - switch + - update + + :return: A list of command line arguments to pass to ``svn``. + """ + if not self.use_interactive: + # --non-interactive switch is available since Subversion 0.14.4. + # Subversion < 1.8 runs in interactive mode by default. + return ["--non-interactive"] + + svn_version = self.get_vcs_version() + # By default, Subversion >= 1.8 runs in non-interactive mode if + # stdin is not a TTY. Since that is how pip invokes SVN, in + # call_subprocess(), pip must pass --force-interactive to ensure + # the user can be prompted for a password, if required. + # SVN added the --force-interactive option in SVN 1.8. Since + # e.g. RHEL/CentOS 7, which is supported until 2024, ships with + # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip + # can't safely add the option if the SVN version is < 1.8 (or unknown). 
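+ # Illustrative example (editorial note, not in the upstream pip source): + # parsed versions compare as tuples, so (1, 12, 0) >= (1, 8) is True and takes + # the branch below, while (1, 7, 14) and the "could not parse" value () do not + # and fall through to the empty option list.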
+ if svn_version >= (1, 8): + return ["--force-interactive"] + + return [] + + def fetch_new(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + rev_display = rev_options.to_display() + logger.info( + "Checking out %s%s to %s", + url, + rev_display, + display_path(dest), + ) + cmd_args = make_command( + "checkout", + "-q", + self.get_remote_call_options(), + rev_options.to_args(), + url, + dest, + ) + self.run_command(cmd_args) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + cmd_args = make_command( + "switch", + self.get_remote_call_options(), + rev_options.to_args(), + url, + dest, + ) + self.run_command(cmd_args) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + cmd_args = make_command( + "update", + self.get_remote_call_options(), + rev_options.to_args(), + dest, + ) + self.run_command(cmd_args) + + +vcs.register(Subversion) diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/vcs/versioncontrol.py b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/versioncontrol.py new file mode 100644 index 0000000..1139051 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/vcs/versioncontrol.py @@ -0,0 +1,693 @@ +"""Handles all VCS (version control) support""" + +import logging +import os +import shutil +import sys +import urllib.parse +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Tuple, + Type, + Union, +) + +from pip._internal.cli.spinners import SpinnerInterface +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import ( + HiddenText, + ask_path_exists, + backup_dir, + display_path, + hide_url, + hide_value, + is_installable_dir, + rmtree, +) +from pip._internal.utils.subprocess import CommandArgs, call_subprocess, make_command +from pip._internal.utils.urls import get_url_scheme + +if TYPE_CHECKING: + # Literal was introduced in Python 3.8. + # + # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. + from typing import Literal + + +__all__ = ["vcs"] + + +logger = logging.getLogger(__name__) + +AuthInfo = Tuple[Optional[str], Optional[str]] + + +def is_url(name: str) -> bool: + """ + Return true if the name looks like a URL. + """ + scheme = get_url_scheme(name) + if scheme is None: + return False + return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes + + +def make_vcs_requirement_url( + repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None +) -> str: + """ + Return the URL for a VCS requirement. + + Args: + repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). + project_name: the (unescaped) project name. + """ + egg_project_name = project_name.replace("-", "_") + req = f"{repo_url}@{rev}#egg={egg_project_name}" + if subdir: + req += f"&subdirectory={subdir}" + + return req + + +def find_path_to_project_root_from_repo_root( + location: str, repo_root: str +) -> Optional[str]: + """ + Find the the Python project's root by searching up the filesystem from + `location`. Return the path to project root relative to `repo_root`. + Return None if the project root is `repo_root`, or cannot be found. + """ + # find project root. + orig_location = location + while not is_installable_dir(location): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding a Python project. 
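+ # (At this point os.path.dirname() has returned its own input, i.e. the + # filesystem root was reached, so the upward walk cannot continue.)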
+ logger.warning( + "Could not find a Python project for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + if os.path.samefile(repo_root, location): + return None + + return os.path.relpath(location, repo_root) + + +class RemoteNotFoundError(Exception): + pass + + +class RemoteNotValidError(Exception): + def __init__(self, url: str): + super().__init__(url) + self.url = url + + +class RevOptions: + + """ + Encapsulates a VCS-specific revision to install, along with any VCS + install options. + + Instances of this class should be treated as if immutable. + """ + + def __init__( + self, + vc_class: Type["VersionControl"], + rev: Optional[str] = None, + extra_args: Optional[CommandArgs] = None, + ) -> None: + """ + Args: + vc_class: a VersionControl subclass. + rev: the name of the revision to install. + extra_args: a list of extra options. + """ + if extra_args is None: + extra_args = [] + + self.extra_args = extra_args + self.rev = rev + self.vc_class = vc_class + self.branch_name: Optional[str] = None + + def __repr__(self) -> str: + return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>" + + @property + def arg_rev(self) -> Optional[str]: + if self.rev is None: + return self.vc_class.default_arg_rev + + return self.rev + + def to_args(self) -> CommandArgs: + """ + Return the VCS-specific command arguments. + """ + args: CommandArgs = [] + rev = self.arg_rev + if rev is not None: + args += self.vc_class.get_base_rev_args(rev) + args += self.extra_args + + return args + + def to_display(self) -> str: + if not self.rev: + return "" + + return f" (to revision {self.rev})" + + def make_new(self, rev: str) -> "RevOptions": + """ + Make a copy of the current instance, but with a new rev. + + Args: + rev: the name of the revision for the new object. + """ + return self.vc_class.make_rev_options(rev, extra_args=self.extra_args) + + +class VcsSupport: + _registry: Dict[str, "VersionControl"] = {} + schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"] + + def __init__(self) -> None: + # Register more schemes with urlparse for various version control + # systems + urllib.parse.uses_netloc.extend(self.schemes) + super().__init__() + + def __iter__(self) -> Iterator[str]: + return self._registry.__iter__() + + @property + def backends(self) -> List["VersionControl"]: + return list(self._registry.values()) + + @property + def dirnames(self) -> List[str]: + return [backend.dirname for backend in self.backends] + + @property + def all_schemes(self) -> List[str]: + schemes: List[str] = [] + for backend in self.backends: + schemes.extend(backend.schemes) + return schemes + + def register(self, cls: Type["VersionControl"]) -> None: + if not hasattr(cls, "name"): + logger.warning("Cannot register VCS %s", cls.__name__) + return + if cls.name not in self._registry: + self._registry[cls.name] = cls() + logger.debug("Registered VCS backend: %s", cls.name) + + def unregister(self, name: str) -> None: + if name in self._registry: + del self._registry[name] + + def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]: + """ + Return a VersionControl object if a repository of that type is found + at the given directory. + """ + vcs_backends = {} + for vcs_backend in self._registry.values(): + repo_path = vcs_backend.get_repository_root(location) + if not repo_path: + continue + logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name) + vcs_backends[repo_path] = vcs_backend + + if not vcs_backends: + return None + + # Choose the VCS in the inner-most directory.
Since all repository + # roots found here would be either `location` or one of its + # parents, the longest path should have the most path components, + # i.e. the backend representing the inner-most repository. + inner_most_repo_path = max(vcs_backends, key=len) + return vcs_backends[inner_most_repo_path] + + def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]: + """ + Return a VersionControl object or None. + """ + for vcs_backend in self._registry.values(): + if scheme in vcs_backend.schemes: + return vcs_backend + return None + + def get_backend(self, name: str) -> Optional["VersionControl"]: + """ + Return a VersionControl object or None. + """ + name = name.lower() + return self._registry.get(name) + + +vcs = VcsSupport() + + +class VersionControl: + name = "" + dirname = "" + repo_name = "" + # List of supported schemes for this Version Control + schemes: Tuple[str, ...] = () + # Iterable of environment variable names to pass to call_subprocess(). + unset_environ: Tuple[str, ...] = () + default_arg_rev: Optional[str] = None + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: + """ + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. + """ + return not remote_url.lower().startswith(f"{cls.name}:") + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. + """ + return None + + @classmethod + def get_requirement_revision(cls, repo_dir: str) -> str: + """ + Return the revision string that should be used in a requirement. + """ + return cls.get_revision(repo_dir) + + @classmethod + def get_src_requirement(cls, repo_dir: str, project_name: str) -> str: + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. + + Args: + project_name: the (unescaped) project name. + + The return value has a form similar to the following: + + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = f"{cls.name}+{repo_url}" + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) + req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir) + + return req + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + """ + Return the base revision arguments for a vcs command. + + Args: + rev: the name of a revision to install. Cannot be None. + """ + raise NotImplementedError + + def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: + """ + Return true if the commit hash checked out at dest matches + the revision in url. + + Always return False, if the VCS does not support immutable commit + hashes. + + This method does not check if there are local uncommitted changes + in dest after checkout, as pip currently has no use case for that. + """ + return False + + @classmethod + def make_rev_options( + cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None + ) -> RevOptions: + """ + Return a RevOptions object. + + Args: + rev: the name of a revision to install. + extra_args: a list of extra options. 
+ """ + return RevOptions(cls, rev, extra_args=extra_args) + + @classmethod + def _is_local_repository(cls, repo: str) -> bool: + """ + posix absolute paths start with os.path.sep, + win32 ones start with drive (like c:\\folder) + """ + drive, tail = os.path.splitdrive(repo) + return repo.startswith(os.path.sep) or bool(drive) + + @classmethod + def get_netloc_and_auth( + cls, netloc: str, scheme: str + ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]: + """ + Parse the repository URL's netloc, and return the new netloc to use + along with auth information. + + Args: + netloc: the original repository URL netloc. + scheme: the repository URL's scheme without the vcs prefix. + + This is mainly for the Subversion class to override, so that auth + information can be provided via the --username and --password options + instead of through the URL. For other subclasses like Git without + such an option, auth information must stay in the URL. + + Returns: (netloc, (username, password)). + """ + return netloc, (None, None) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + """ + Parse the repository URL to use, and return the URL, revision, + and auth info to use. + + Returns: (url, rev, (username, password)). + """ + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) + if "+" not in scheme: + raise ValueError( + "Sorry, {!r} is a malformed VCS url. " + "The format is +://, " + "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) + ) + # Remove the vcs prefix. + scheme = scheme.split("+", 1)[1] + netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) + rev = None + if "@" in path: + path, rev = path.rsplit("@", 1) + if not rev: + raise InstallationError( + "The URL {!r} has an empty revision (after @) " + "which is not supported. Include a revision after @ " + "or remove @ from the URL.".format(url) + ) + url = urllib.parse.urlunsplit((scheme, netloc, path, query, "")) + return url, rev, user_pass + + @staticmethod + def make_rev_args( + username: Optional[str], password: Optional[HiddenText] + ) -> CommandArgs: + """ + Return the RevOptions "extra arguments" to use in obtain(). + """ + return [] + + def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]: + """ + Return the URL and RevOptions object to use in obtain(), + as a tuple (url, rev_options). + """ + secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) + username, secret_password = user_pass + password: Optional[HiddenText] = None + if secret_password is not None: + password = hide_value(secret_password) + extra_args = self.make_rev_args(username, password) + rev_options = self.make_rev_options(rev, extra_args=extra_args) + + return hide_url(secret_url), rev_options + + @staticmethod + def normalize_url(url: str) -> str: + """ + Normalize a URL for comparison by unquoting it and removing any + trailing slash. + """ + return urllib.parse.unquote(url).rstrip("/") + + @classmethod + def compare_urls(cls, url1: str, url2: str) -> bool: + """ + Compare two repo URLs for identity, ignoring incidental differences. + """ + return cls.normalize_url(url1) == cls.normalize_url(url2) + + def fetch_new(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + """ + Fetch a revision from a repository, in the case that this is the + first fetch from the repository. + + Args: + dest: the directory to fetch the repository to. + rev_options: a RevOptions object. 
+ """ + raise NotImplementedError + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + """ + Switch the repo at ``dest`` to point to ``URL``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + """ + Update an already-existing repo to the given ``rev_options``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """ + Return whether the id of the current commit equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + raise NotImplementedError + + def obtain(self, dest: str, url: HiddenText) -> None: + """ + Install or update in editable mode the package represented by this + VersionControl object. + + :param dest: the repository directory in which to install or update. + :param url: the repository URL starting with a vcs prefix. + """ + url, rev_options = self.get_url_rev_options(url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options) + return + + rev_display = rev_options.to_display() + if self.is_repository_directory(dest): + existing_url = self.get_remote_url(dest) + if self.compare_urls(existing_url, url.secret): + logger.debug( + "%s in %s exists, and has correct URL (%s)", + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + "Updating %s %s%s", + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, url, rev_options) + else: + logger.info("Skipping because already up-to-date.") + return + + logger.warning( + "%s %s in %s exists with URL %s", + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b")) + else: + logger.warning( + "Directory %s already exists, and is not a %s %s.", + dest, + self.name, + self.repo_name, + ) + # https://github.com/python/mypy/issues/1174 + prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b")) # type: ignore + + logger.warning( + "The plan is to install the %s repository %s", + self.name, + url, + ) + response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1]) + + if response == "a": + sys.exit(-1) + + if response == "w": + logger.warning("Deleting %s", display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options) + return + + if response == "b": + dest_dir = backup_dir(dest) + logger.warning("Backing up %s to %s", display_path(dest), dest_dir) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options) + return + + # Do nothing if the response is "i". + if response == "s": + logger.info( + "Switching %s %s to %s%s", + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + + def unpack(self, location: str, url: HiddenText) -> None: + """ + Clean up current location and download the url repository + (and vcs infos) into location + + :param url: the repository URL starting with a vcs prefix. + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location, url=url) + + @classmethod + def get_remote_url(cls, location: str) -> str: + """ + Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. 
+ """ + raise NotImplementedError + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the current commit id of the files at the given location. + """ + raise NotImplementedError + + @classmethod + def run_command( + cls, + cmd: Union[List[str], CommandArgs], + show_stdout: bool = True, + cwd: Optional[str] = None, + on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise", + extra_ok_returncodes: Optional[Iterable[int]] = None, + command_desc: Optional[str] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + spinner: Optional[SpinnerInterface] = None, + log_failed_cmd: bool = True, + stdout_only: bool = False, + ) -> str: + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = make_command(cls.name, *cmd) + try: + return call_subprocess( + cmd, + show_stdout, + cwd, + on_returncode=on_returncode, + extra_ok_returncodes=extra_ok_returncodes, + command_desc=command_desc, + extra_environ=extra_environ, + unset_environ=cls.unset_environ, + spinner=spinner, + log_failed_cmd=log_failed_cmd, + stdout_only=stdout_only, + ) + except FileNotFoundError: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + raise BadCommand( + f"Cannot find command {cls.name!r} - do you have " + f"{cls.name!r} installed and in your PATH?" + ) + except PermissionError: + # errno.EACCES = Permission denied + # This error occurs, for instance, when the command is installed + # only for another user. So, the current user don't have + # permission to call the other user command. + raise BadCommand( + f"No permission to execute {cls.name!r} - install it " + f"locally, globally (ask admin), or check your PATH. " + f"See possible solutions at " + f"https://pip.pypa.io/en/latest/reference/pip_freeze/" + f"#fixing-permission-denied." + ) + + @classmethod + def is_repository_directory(cls, path: str) -> bool: + """ + Return whether a directory path is a repository directory. + """ + logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + """ + Return the "root" (top-level) directory controlled by the vcs, + or `None` if the directory is not in any. + + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For + example, the Git override checks that Git is actually available. + """ + if cls.is_repository_directory(location): + return location + return None diff --git a/.venv/lib/python3.9/site-packages/pip/_internal/wheel_builder.py b/.venv/lib/python3.9/site-packages/pip/_internal/wheel_builder.py new file mode 100644 index 0000000..a9123a0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_internal/wheel_builder.py @@ -0,0 +1,375 @@ +"""Orchestrator for building wheels from InstallRequirements. 
+""" + +import logging +import os.path +import re +import shutil +from typing import Any, Callable, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version +from pip._vendor.packaging.version import InvalidVersion, Version + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel +from pip._internal.metadata import FilesystemWheel, get_wheel_distribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.build.wheel import build_wheel_pep517 +from pip._internal.operations.build.wheel_editable import build_wheel_editable +from pip._internal.operations.build.wheel_legacy import build_wheel_legacy +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed +from pip._internal.utils.setuptools_build import make_setuptools_clean_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + +_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE) + +BinaryAllowedPredicate = Callable[[InstallRequirement], bool] +BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] + + +def _contains_egg_info(s: str) -> bool: + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_build( + req: InstallRequirement, + need_wheel: bool, + check_binary_allowed: BinaryAllowedPredicate, +) -> bool: + """Return whether an InstallRequirement should be built into a wheel.""" + if req.constraint: + # never build requirements that are merely constraints + return False + if req.is_wheel: + if need_wheel: + logger.info( + "Skipping %s, due to already being wheel.", + req.name, + ) + return False + + if need_wheel: + # i.e. pip wheel, not pip install + return True + + # From this point, this concerns the pip install command only + # (need_wheel=False). 
+ + if not req.source_dir: + return False + + if req.editable: + # we only build PEP 660 editable requirements + return req.supports_pyproject_editable() + + if req.use_pep517: + return True + + if not check_binary_allowed(req): + logger.info( + "Skipping wheel build for %s, due to binaries being disabled for it.", + req.name, + ) + return False + + if not is_wheel_installed(): + # we don't build legacy requirements if wheel is not installed + logger.info( + "Using legacy 'setup.py install' for %s, " + "since package 'wheel' is not installed.", + req.name, + ) + return False + + return True + + +def should_build_for_wheel_command( + req: InstallRequirement, +) -> bool: + return _should_build(req, need_wheel=True, check_binary_allowed=_always_true) + + +def should_build_for_install_command( + req: InstallRequirement, + check_binary_allowed: BinaryAllowedPredicate, +) -> bool: + return _should_build( + req, need_wheel=False, check_binary_allowed=check_binary_allowed + ) + + +def _should_cache( + req: InstallRequirement, +) -> Optional[bool]: + """ + Return whether a built InstallRequirement can be stored in the persistent + wheel cache, assuming the wheel cache is available, and _should_build() + has determined a wheel needs to be built. + """ + if req.editable or not req.source_dir: + # never cache editable requirements + return False + + if req.link and req.link.is_vcs: + # VCS checkout. Do not cache + # unless it points to an immutable commit hash. + assert not req.editable + assert req.source_dir + vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) + assert vcs_backend + if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): + return True + return False + + assert req.link + base, ext = req.link.splitext() + if _contains_egg_info(base): + return True + + # Otherwise, do not cache. + return False + + +def _get_cache_dir( + req: InstallRequirement, + wheel_cache: WheelCache, +) -> str: + """Return the persistent or temporary cache directory where the built + wheel need to be stored. 
+ """ + cache_available = bool(wheel_cache.cache_dir) + assert req.link + if cache_available and _should_cache(req): + cache_dir = wheel_cache.get_path_for_link(req.link) + else: + cache_dir = wheel_cache.get_ephem_path_for_link(req.link) + return cache_dir + + +def _always_true(_: Any) -> bool: + return True + + +def _verify_one(req: InstallRequirement, wheel_path: str) -> None: + canonical_name = canonicalize_name(req.name or "") + w = Wheel(os.path.basename(wheel_path)) + if canonicalize_name(w.name) != canonical_name: + raise InvalidWheelFilename( + "Wheel has unexpected file name: expected {!r}, " + "got {!r}".format(canonical_name, w.name), + ) + dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name) + dist_verstr = str(dist.version) + if canonicalize_version(dist_verstr) != canonicalize_version(w.version): + raise InvalidWheelFilename( + "Wheel has unexpected file name: expected {!r}, " + "got {!r}".format(dist_verstr, w.version), + ) + metadata_version_value = dist.metadata_version + if metadata_version_value is None: + raise UnsupportedWheel("Missing Metadata-Version") + try: + metadata_version = Version(metadata_version_value) + except InvalidVersion: + msg = f"Invalid Metadata-Version: {metadata_version_value}" + raise UnsupportedWheel(msg) + if metadata_version >= Version("1.2") and not isinstance(dist.version, Version): + raise UnsupportedWheel( + "Metadata 1.2 mandates PEP 440 version, " + "but {!r} is not".format(dist_verstr) + ) + + +def _build_one( + req: InstallRequirement, + output_dir: str, + verify: bool, + build_options: List[str], + global_options: List[str], + editable: bool, +) -> Optional[str]: + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. + """ + artifact = "editable" if editable else "wheel" + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building %s for %s failed: %s", + artifact, + req.name, + e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + wheel_path = _build_one_inside_env( + req, output_dir, build_options, global_options, editable + ) + if wheel_path and verify: + try: + _verify_one(req, wheel_path) + except (InvalidWheelFilename, UnsupportedWheel) as e: + logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e) + return None + return wheel_path + + +def _build_one_inside_env( + req: InstallRequirement, + output_dir: str, + build_options: List[str], + global_options: List[str], + editable: bool, +) -> Optional[str]: + with TempDirectory(kind="wheel") as temp_dir: + assert req.name + if req.use_pep517: + assert req.metadata_directory + assert req.pep517_backend + if global_options: + logger.warning( + "Ignoring --global-option when building %s using PEP 517", req.name + ) + if build_options: + logger.warning( + "Ignoring --build-option when building %s using PEP 517", req.name + ) + if editable: + wheel_path = build_wheel_editable( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_legacy( + name=req.name, + setup_py_path=req.setup_py_path, + source_dir=req.unpacked_source_directory, + global_options=global_options, + build_options=build_options, + tempd=temp_dir.path, + ) + + if wheel_path is not None: + wheel_name = 
os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + shutil.move(wheel_path, dest_path) + logger.info( + "Created wheel for %s: filename=%s size=%d sha256=%s", + req.name, + wheel_name, + length, + wheel_hash.hexdigest(), + ) + logger.info("Stored in directory: %s", output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, + e, + ) + # Ignore return, we can't do anything else useful. + if not req.use_pep517: + _clean_one_legacy(req, global_options) + return None + + +def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool: + clean_args = make_setuptools_clean_args( + req.setup_py_path, + global_options=global_options, + ) + + logger.info("Running setup.py clean for %s", req.name) + try: + call_subprocess(clean_args, cwd=req.source_dir) + return True + except Exception: + logger.error("Failed cleaning build dir for %s", req.name) + return False + + +def build( + requirements: Iterable[InstallRequirement], + wheel_cache: WheelCache, + verify: bool, + build_options: List[str], + global_options: List[str], +) -> BuildResult: + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ + if not requirements: + return [], [] + + # Build the wheels. + logger.info( + "Building wheels for collected packages: %s", + ", ".join(req.name for req in requirements), # type: ignore + ) + + with indent_log(): + build_successes, build_failures = [], [] + for req in requirements: + assert req.name + cache_dir = _get_cache_dir(req, wheel_cache) + wheel_file = _build_one( + req, + cache_dir, + verify, + build_options, + global_options, + req.editable and req.permit_editable_wheels, + ) + if wheel_file: + # Update the link for this. + req.link = Link(path_to_url(wheel_file)) + req.local_file_path = req.link.file_path + assert req.link.is_wheel + build_successes.append(req) + else: + build_failures.append(req) + + # notify success/failure + if build_successes: + logger.info( + "Successfully built %s", + " ".join([req.name for req in build_successes]), # type: ignore + ) + if build_failures: + logger.info( + "Failed to build %s", + " ".join([req.name for req in build_failures]), # type: ignore + ) + # Return a list of requirements that failed to build + return build_successes, build_failures diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/__init__.py new file mode 100644 index 0000000..3843cb0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/__init__.py @@ -0,0 +1,111 @@ +""" +pip._vendor is for vendoring dependencies of pip to prevent needing pip to +depend on something external. + +Files inside of pip._vendor should be considered immutable and should only be +updated to versions from upstream. +""" +from __future__ import absolute_import + +import glob +import os.path +import sys + +# Downstream redistributors which have debundled our dependencies should also +# patch this value to be true. This will trigger the additional patching +# to cause things like "six" to be available as pip. +DEBUNDLED = False + +# By default, look in this directory for a bunch of .whl files which we will +# add to the beginning of sys.path before attempting to import anything. 
This +# is done to support downstream re-distributors like Debian and Fedora who +# wish to create their own Wheels for our dependencies to aid in debundling. +WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) + + +# Define a small helper function to alias our vendored modules to the real ones +# if the vendored ones do not exist. This idea of this was taken from +# https://github.com/kennethreitz/requests/pull/2567. +def vendored(modulename): + vendored_name = "{0}.{1}".format(__name__, modulename) + + try: + __import__(modulename, globals(), locals(), level=0) + except ImportError: + # We can just silently allow import failures to pass here. If we + # got to this point it means that ``import pip._vendor.whatever`` + # failed and so did ``import whatever``. Since we're importing this + # upfront in an attempt to alias imports, not erroring here will + # just mean we get a regular import error whenever pip *actually* + # tries to import one of these modules to use it, which actually + # gives us a better error message than we would have otherwise + # gotten. + pass + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) + + +# If we're operating in a debundled setup, then we want to go ahead and trigger +# the aliasing of our vendored libraries as well as looking for wheels to add +# to our sys.path. This will cause all of this code to be a no-op typically +# however downstream redistributors can enable it in a consistent way across +# all platforms. +if DEBUNDLED: + # Actually look inside of WHEEL_DIR to find .whl files and add them to the + # front of our sys.path. + sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path + + # Actually alias all of our vendored dependencies. + vendored("cachecontrol") + vendored("certifi") + vendored("colorama") + vendored("distlib") + vendored("distro") + vendored("html5lib") + vendored("six") + vendored("six.moves") + vendored("six.moves.urllib") + vendored("six.moves.urllib.parse") + vendored("packaging") + vendored("packaging.version") + vendored("packaging.specifiers") + vendored("pep517") + vendored("pkg_resources") + vendored("platformdirs") + vendored("progress") + vendored("requests") + vendored("requests.exceptions") + vendored("requests.packages") + vendored("requests.packages.urllib3") + vendored("requests.packages.urllib3._collections") + vendored("requests.packages.urllib3.connection") + vendored("requests.packages.urllib3.connectionpool") + vendored("requests.packages.urllib3.contrib") + vendored("requests.packages.urllib3.contrib.ntlmpool") + vendored("requests.packages.urllib3.contrib.pyopenssl") + vendored("requests.packages.urllib3.exceptions") + vendored("requests.packages.urllib3.fields") + vendored("requests.packages.urllib3.filepost") + vendored("requests.packages.urllib3.packages") + vendored("requests.packages.urllib3.packages.ordered_dict") + vendored("requests.packages.urllib3.packages.six") + vendored("requests.packages.urllib3.packages.ssl_match_hostname") + vendored("requests.packages.urllib3.packages.ssl_match_hostname." 
+ "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") + vendored("resolvelib") + vendored("tenacity") + vendored("tomli") + vendored("urllib3") diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..449dea1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/distro.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/distro.cpython-39.pyc new file mode 100644 index 0000000..5cbd0f1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/distro.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-39.pyc new file mode 100644 index 0000000..cc04956 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc new file mode 100644 index 0000000..f1625f3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 0000000..a1bbbbe --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -0,0 +1,11 @@ +"""CacheControl import Interface. + +Make it easy to import from cachecontrol without long namespaces. 
+""" +__author__ = "Eric Larson" +__email__ = "eric@ionrock.org" +__version__ = "0.12.6" + +from .wrapper import CacheControl +from .adapter import CacheControlAdapter +from .controller import CacheController diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..bded337 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc new file mode 100644 index 0000000..8fee99b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc new file mode 100644 index 0000000..70a9263 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc new file mode 100644 index 0000000..26b921f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc new file mode 100644 index 0000000..4b7180c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc new file mode 100644 index 0000000..813b7be Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc new file mode 100644 index 0000000..3c713cd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc new file mode 100644 index 0000000..b24df8c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc new file mode 100644 index 0000000..9e12437 Binary files /dev/null and 
b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc new file mode 100644 index 0000000..0ce7417 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/_cmd.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 0000000..f1e0ad9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/_cmd.py @@ -0,0 +1,57 @@ +import logging + +from pip._vendor import requests + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import logger + +from argparse import ArgumentParser + + +def setup_logging(): + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + logger.addHandler(handler) + + +def get_session(): + adapter = CacheControlAdapter( + DictCache(), cache_etags=True, serializer=None, heuristic=None + ) + sess = requests.Session() + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + sess.cache_controller = adapter.controller + return sess + + +def get_args(): + parser = ArgumentParser() + parser.add_argument("url", help="The URL to try and cache") + return parser.parse_args() + + +def main(args=None): + args = get_args() + sess = get_session() + + # Make a request to get a response + resp = sess.get(args.url) + + # Turn on logging + setup_logging() + + # try setting the cache + sess.cache_controller.cache_response(resp.request, resp.raw) + + # Now try to get it + if sess.cache_controller.cached_request(resp.request): + print("Cached!") + else: + print("Not cached :(") + + +if __name__ == "__main__": + main() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/adapter.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 0000000..815650e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -0,0 +1,133 @@ +import types +import functools +import zlib + +from pip._vendor.requests.adapters import HTTPAdapter + +from .controller import CacheController +from .cache import DictCache +from .filewrapper import CallbackFileWrapper + + +class CacheControlAdapter(HTTPAdapter): + invalidating_methods = {"PUT", "DELETE"} + + def __init__( + self, + cache=None, + cache_etags=True, + controller_class=None, + serializer=None, + heuristic=None, + cacheable_methods=None, + *args, + **kw + ): + super(CacheControlAdapter, self).__init__(*args, **kw) + self.cache = DictCache() if cache is None else cache + self.heuristic = heuristic + self.cacheable_methods = cacheable_methods or ("GET",) + + controller_factory = controller_class or CacheController + self.controller = controller_factory( + self.cache, cache_etags=cache_etags, serializer=serializer + ) + + def send(self, request, cacheable_methods=None, **kw): + """ + Send a request. Use the request information to see if it + exists in the cache and cache the response if we need to and can. 
+ """ + cacheable = cacheable_methods or self.cacheable_methods + if request.method in cacheable: + try: + cached_response = self.controller.cached_request(request) + except zlib.error: + cached_response = None + if cached_response: + return self.build_response(request, cached_response, from_cache=True) + + # check for etags and add headers if appropriate + request.headers.update(self.controller.conditional_headers(request)) + + resp = super(CacheControlAdapter, self).send(request, **kw) + + return resp + + def build_response( + self, request, response, from_cache=False, cacheable_methods=None + ): + """ + Build a response by making a request or using the cache. + + This will end up calling send and returning a potentially + cached response + """ + cacheable = cacheable_methods or self.cacheable_methods + if not from_cache and request.method in cacheable: + # Check for any heuristics that might update headers + # before trying to cache. + if self.heuristic: + response = self.heuristic.apply(response) + + # apply any expiration heuristics + if response.status == 304: + # We must have sent an ETag request. This could mean + # that we've been expired already or that we simply + # have an etag. In either case, we want to try and + # update the cache if that is the case. + cached_response = self.controller.update_cached_response( + request, response + ) + + if cached_response is not response: + from_cache = True + + # We are done with the server response, read a + # possible response body (compliant servers will + # not return one, but we cannot be 100% sure) and + # release the connection back to the pool. + response.read(decode_content=False) + response.release_conn() + + response = cached_response + + # We always cache the 301 responses + elif response.status == 301: + self.controller.cache_response(request, response) + else: + # Wrap the response file with a wrapper that will cache the + # response when the stream has been consumed. + response._fp = CallbackFileWrapper( + response._fp, + functools.partial( + self.controller.cache_response, request, response + ), + ) + if response.chunked: + super_update_chunk_length = response._update_chunk_length + + def _update_chunk_length(self): + super_update_chunk_length() + if self.chunk_left == 0: + self._fp._close() + + response._update_chunk_length = types.MethodType( + _update_chunk_length, response + ) + + resp = super(CacheControlAdapter, self).build_response(request, response) + + # See if we should invalidate the cache. + if request.method in self.invalidating_methods and resp.ok: + cache_url = self.controller.cache_url(request.url) + self.cache.delete(cache_url) + + # Give the request a from_cache attr to let people use it + resp.from_cache = from_cache + + return resp + + def close(self): + self.cache.close() + super(CacheControlAdapter, self).close() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/cache.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/cache.py new file mode 100644 index 0000000..94e0773 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/cache.py @@ -0,0 +1,39 @@ +""" +The cache object API for implementing caches. The default is a thread +safe in-memory dictionary. 
+""" +from threading import Lock + + +class BaseCache(object): + + def get(self, key): + raise NotImplementedError() + + def set(self, key, value): + raise NotImplementedError() + + def delete(self, key): + raise NotImplementedError() + + def close(self): + pass + + +class DictCache(BaseCache): + + def __init__(self, init_dict=None): + self.lock = Lock() + self.data = init_dict or {} + + def get(self, key): + return self.data.get(key, None) + + def set(self, key, value): + with self.lock: + self.data.update({key: value}) + + def delete(self, key): + with self.lock: + if key in self.data: + self.data.pop(key) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__init__.py new file mode 100644 index 0000000..0e1658f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__init__.py @@ -0,0 +1,2 @@ +from .file_cache import FileCache # noqa +from .redis_cache import RedisCache # noqa diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..b01c660 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc new file mode 100644 index 0000000..9199457 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc new file mode 100644 index 0000000..6709ea5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 0000000..607b945 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -0,0 +1,146 @@ +import hashlib +import os +from textwrap import dedent + +from ..cache import BaseCache +from ..controller import CacheController + +try: + FileNotFoundError +except NameError: + # py2.X + FileNotFoundError = (IOError, OSError) + + +def _secure_open_write(filename, fmode): + # We only want to write to this file, so open it in write only mode + flags = os.O_WRONLY + + # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only + # will open *new* files. + # We specify this because we want to ensure that the mode we pass is the + # mode of the file. + flags |= os.O_CREAT | os.O_EXCL + + # Do not follow symlinks to prevent someone from making a symlink that + # we follow and insecurely open a cache file. 
+ if hasattr(os, "O_NOFOLLOW"): + flags |= os.O_NOFOLLOW + + # On Windows we'll mark this file as binary + if hasattr(os, "O_BINARY"): + flags |= os.O_BINARY + + # Before we open our file, we want to delete any existing file that is + # there + try: + os.remove(filename) + except (IOError, OSError): + # The file must not exist already, so we can just skip ahead to opening + pass + + # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a + # race condition happens between the os.remove and this line, that an + # error will be raised. Because we utilize a lockfile this should only + # happen if someone is attempting to attack us. + fd = os.open(filename, flags, fmode) + try: + return os.fdopen(fd, "wb") + + except: + # An error occurred wrapping our FD in a file object + os.close(fd) + raise + + +class FileCache(BaseCache): + + def __init__( + self, + directory, + forever=False, + filemode=0o0600, + dirmode=0o0700, + use_dir_lock=None, + lock_class=None, + ): + + if use_dir_lock is not None and lock_class is not None: + raise ValueError("Cannot use use_dir_lock and lock_class together") + + try: + from lockfile import LockFile + from lockfile.mkdirlockfile import MkdirLockFile + except ImportError: + notice = dedent( + """ + NOTE: In order to use the FileCache you must have + lockfile installed. You can install it via pip: + pip install lockfile + """ + ) + raise ImportError(notice) + + else: + if use_dir_lock: + lock_class = MkdirLockFile + + elif lock_class is None: + lock_class = LockFile + + self.directory = directory + self.forever = forever + self.filemode = filemode + self.dirmode = dirmode + self.lock_class = lock_class + + @staticmethod + def encode(x): + return hashlib.sha224(x.encode()).hexdigest() + + def _fn(self, name): + # NOTE: This method should not change as some may depend on it. + # See: https://github.com/ionrock/cachecontrol/issues/63 + hashed = self.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + name = self._fn(key) + try: + with open(name, "rb") as fh: + return fh.read() + + except FileNotFoundError: + return None + + def set(self, key, value): + name = self._fn(key) + + # Make sure the directory exists + try: + os.makedirs(os.path.dirname(name), self.dirmode) + except (IOError, OSError): + pass + + with self.lock_class(name) as lock: + # Write our actual file + with _secure_open_write(lock.path, self.filemode) as fh: + fh.write(value) + + def delete(self, key): + name = self._fn(key) + if not self.forever: + try: + os.remove(name) + except FileNotFoundError: + pass + + +def url_to_file_path(url, filecache): + """Return the file cache path based on the URL. + + This does not ensure the file exists! 
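# A short sketch of where the FileCache above places entries on disk. The
# directory name and URL are placeholders, and constructing a FileCache
# requires the third-party `lockfile` package, as its __init__ notes:
from pip._vendor.cachecontrol.caches.file_cache import FileCache, url_to_file_path

fc = FileCache(".web_cache")
path = url_to_file_path("https://example.com/index.html", fc)
# The normalized URL is hashed with SHA-224; the first five hex characters
# become nested directories and the full digest is the file name.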
+ """ + key = CacheController.cache_url(url) + return filecache._fn(key) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 0000000..ed705ce --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -0,0 +1,33 @@ +from __future__ import division + +from datetime import datetime +from pip._vendor.cachecontrol.cache import BaseCache + + +class RedisCache(BaseCache): + + def __init__(self, conn): + self.conn = conn + + def get(self, key): + return self.conn.get(key) + + def set(self, key, value, expires=None): + if not expires: + self.conn.set(key, value) + else: + expires = expires - datetime.utcnow() + self.conn.setex(key, int(expires.total_seconds()), value) + + def delete(self, key): + self.conn.delete(key) + + def clear(self): + """Helper for clearing all the keys in a database. Use with + caution!""" + for key in self.conn.keys(): + self.conn.delete(key) + + def close(self): + """Redis uses connection pooling, no need to close the connection.""" + pass diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/compat.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/compat.py new file mode 100644 index 0000000..33b5aed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/compat.py @@ -0,0 +1,29 @@ +try: + from urllib.parse import urljoin +except ImportError: + from urlparse import urljoin + + +try: + import cPickle as pickle +except ImportError: + import pickle + + +# Handle the case where the requests module has been patched to not have +# urllib3 bundled as part of its source. +try: + from pip._vendor.requests.packages.urllib3.response import HTTPResponse +except ImportError: + from pip._vendor.urllib3.response import HTTPResponse + +try: + from pip._vendor.requests.packages.urllib3.util import is_fp_closed +except ImportError: + from pip._vendor.urllib3.util import is_fp_closed + +# Replicate some six behaviour +try: + text_type = unicode +except NameError: + text_type = str diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/controller.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 0000000..dafe55c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/controller.py @@ -0,0 +1,376 @@ +""" +The httplib2 algorithms ported for use with requests. +""" +import logging +import re +import calendar +import time +from email.utils import parsedate_tz + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .cache import DictCache +from .serialize import Serializer + + +logger = logging.getLogger(__name__) + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + + +class CacheController(object): + """An interface to see if request should cached or not. 
+ """ + + def __init__( + self, cache=None, cache_etags=True, serializer=None, status_codes=None + ): + self.cache = DictCache() if cache is None else cache + self.cache_etags = cache_etags + self.serializer = serializer or Serializer() + self.cacheable_status_codes = status_codes or (200, 203, 300, 301) + + @classmethod + def _urlnorm(cls, uri): + """Normalize the URL to create a safe key for the cache""" + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise Exception("Only absolute URIs are allowed. uri = %s" % uri) + + scheme = scheme.lower() + authority = authority.lower() + + if not path: + path = "/" + + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + defrag_uri = scheme + "://" + authority + request_uri + + return defrag_uri + + @classmethod + def cache_url(cls, uri): + return cls._urlnorm(uri) + + def parse_cache_control(self, headers): + known_directives = { + # https://tools.ietf.org/html/rfc7234#section-5.2 + "max-age": (int, True), + "max-stale": (int, False), + "min-fresh": (int, True), + "no-cache": (None, False), + "no-store": (None, False), + "no-transform": (None, False), + "only-if-cached": (None, False), + "must-revalidate": (None, False), + "public": (None, False), + "private": (None, False), + "proxy-revalidate": (None, False), + "s-maxage": (int, True), + } + + cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) + + retval = {} + + for cc_directive in cc_headers.split(","): + if not cc_directive.strip(): + continue + + parts = cc_directive.split("=", 1) + directive = parts[0].strip() + + try: + typ, required = known_directives[directive] + except KeyError: + logger.debug("Ignoring unknown cache-control directive: %s", directive) + continue + + if not typ or not required: + retval[directive] = None + if typ: + try: + retval[directive] = typ(parts[1].strip()) + except IndexError: + if required: + logger.debug( + "Missing value for cache-control " "directive: %s", + directive, + ) + except ValueError: + logger.debug( + "Invalid value for cache-control directive " "%s, must be %s", + directive, + typ.__name__, + ) + + return retval + + def cached_request(self, request): + """ + Return a cached response if it exists in the cache, otherwise + return False. + """ + cache_url = self.cache_url(request.url) + logger.debug('Looking up "%s" in the cache', cache_url) + cc = self.parse_cache_control(request.headers) + + # Bail out if the request insists on fresh data + if "no-cache" in cc: + logger.debug('Request header has "no-cache", cache bypassed') + return False + + if "max-age" in cc and cc["max-age"] == 0: + logger.debug('Request header has "max_age" as 0, cache bypassed') + return False + + # Request allows serving from the cache, let's see if we find something + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug("No cache entry available") + return False + + # Check whether it can be deserialized + resp = self.serializer.loads(request, cache_data) + if not resp: + logger.warning("Cache entry deserialization failed, entry ignored") + return False + + # If we have a cached 301, return it immediately. We don't + # need to test our response for other headers b/c it is + # intrinsically "cacheable" as it is Permanent. 
+ # See: + # https://tools.ietf.org/html/rfc7231#section-6.4.2 + # + # Client can try to refresh the value by repeating the request + # with cache busting headers as usual (ie no-cache). + if resp.status == 301: + msg = ( + 'Returning cached "301 Moved Permanently" response ' + "(ignoring date and etag information)" + ) + logger.debug(msg) + return resp + + headers = CaseInsensitiveDict(resp.headers) + if not headers or "date" not in headers: + if "etag" not in headers: + # Without date or etag, the cached response can never be used + # and should be deleted. + logger.debug("Purging cached response: no date or etag") + self.cache.delete(cache_url) + logger.debug("Ignoring cached response: no date") + return False + + now = time.time() + date = calendar.timegm(parsedate_tz(headers["date"])) + current_age = max(0, now - date) + logger.debug("Current age based on date: %i", current_age) + + # TODO: There is an assumption that the result will be a + # urllib3 response object. This may not be best since we + # could probably avoid instantiating or constructing the + # response until we know we need it. + resp_cc = self.parse_cache_control(headers) + + # determine freshness + freshness_lifetime = 0 + + # Check the max-age pragma in the cache control header + if "max-age" in resp_cc: + freshness_lifetime = resp_cc["max-age"] + logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) + + # If there isn't a max-age, check for an expires header + elif "expires" in headers: + expires = parsedate_tz(headers["expires"]) + if expires is not None: + expire_time = calendar.timegm(expires) - date + freshness_lifetime = max(0, expire_time) + logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) + + # Determine if we are setting freshness limit in the + # request. Note, this overrides what was in the response. + if "max-age" in cc: + freshness_lifetime = cc["max-age"] + logger.debug( + "Freshness lifetime from request max-age: %i", freshness_lifetime + ) + + if "min-fresh" in cc: + min_fresh = cc["min-fresh"] + # adjust our current age by our min fresh + current_age += min_fresh + logger.debug("Adjusted current age from min-fresh: %i", current_age) + + # Return entry if it is fresh enough + if freshness_lifetime > current_age: + logger.debug('The response is "fresh", returning cached response') + logger.debug("%i > %i", freshness_lifetime, current_age) + return resp + + # we're not fresh. If we don't have an Etag, clear it out + if "etag" not in headers: + logger.debug('The cached response is "stale" with no etag, purging') + self.cache.delete(cache_url) + + # return the original handler + return False + + def conditional_headers(self, request): + cache_url = self.cache_url(request.url) + resp = self.serializer.loads(request, self.cache.get(cache_url)) + new_headers = {} + + if resp: + headers = CaseInsensitiveDict(resp.headers) + + if "etag" in headers: + new_headers["If-None-Match"] = headers["ETag"] + + if "last-modified" in headers: + new_headers["If-Modified-Since"] = headers["Last-Modified"] + + return new_headers + + def cache_response(self, request, response, body=None, status_codes=None): + """ + Algorithm for caching requests. + + This assumes a requests Response object. 
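# A small illustration of the parse_cache_control() helper that drives the
# freshness checks above (the header value is an arbitrary example):
from pip._vendor.cachecontrol.controller import CacheController

cc = CacheController().parse_cache_control(
    {"cache-control": "max-age=3600, no-cache, s-maxage=600"}
)
# cc == {"max-age": 3600, "no-cache": None, "s-maxage": 600}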
+ """ + # From httplib2: Don't cache 206's since we aren't going to + # handle byte range requests + cacheable_status_codes = status_codes or self.cacheable_status_codes + if response.status not in cacheable_status_codes: + logger.debug( + "Status code %s not in %s", response.status, cacheable_status_codes + ) + return + + response_headers = CaseInsensitiveDict(response.headers) + + # If we've been given a body, our response has a Content-Length, that + # Content-Length is valid then we can check to see if the body we've + # been given matches the expected size, and if it doesn't we'll just + # skip trying to cache it. + if ( + body is not None + and "content-length" in response_headers + and response_headers["content-length"].isdigit() + and int(response_headers["content-length"]) != len(body) + ): + return + + cc_req = self.parse_cache_control(request.headers) + cc = self.parse_cache_control(response_headers) + + cache_url = self.cache_url(request.url) + logger.debug('Updating cache with response from "%s"', cache_url) + + # Delete it from the cache if we happen to have it stored there + no_store = False + if "no-store" in cc: + no_store = True + logger.debug('Response header has "no-store"') + if "no-store" in cc_req: + no_store = True + logger.debug('Request header has "no-store"') + if no_store and self.cache.get(cache_url): + logger.debug('Purging existing cache entry to honor "no-store"') + self.cache.delete(cache_url) + if no_store: + return + + # https://tools.ietf.org/html/rfc7234#section-4.1: + # A Vary header field-value of "*" always fails to match. + # Storing such a response leads to a deserialization warning + # during cache lookup and is not allowed to ever be served, + # so storing it can be avoided. + if "*" in response_headers.get("vary", ""): + logger.debug('Response header has "Vary: *"') + return + + # If we've been given an etag, then keep the response + if self.cache_etags and "etag" in response_headers: + logger.debug("Caching due to etag") + self.cache.set( + cache_url, self.serializer.dumps(request, response, body=body) + ) + + # Add to the cache any 301s. We do this before looking that + # the Date headers. + elif response.status == 301: + logger.debug("Caching permanant redirect") + self.cache.set(cache_url, self.serializer.dumps(request, response)) + + # Add to the cache if the response headers demand it. If there + # is no date header then we can't do anything about expiring + # the cache. + elif "date" in response_headers: + # cache when there is a max-age > 0 + if "max-age" in cc and cc["max-age"] > 0: + logger.debug("Caching b/c date exists and max-age > 0") + self.cache.set( + cache_url, self.serializer.dumps(request, response, body=body) + ) + + # If the request can expire, it means we should cache it + # in the meantime. + elif "expires" in response_headers: + if response_headers["expires"]: + logger.debug("Caching b/c of expires header") + self.cache.set( + cache_url, self.serializer.dumps(request, response, body=body) + ) + + def update_cached_response(self, request, response): + """On a 304 we will get a new set of headers that we want to + update our cached value with, assuming we have one. + + This should only ever be called when we've sent an ETag and + gotten a 304 as the response. 
+ """ + cache_url = self.cache_url(request.url) + + cached_response = self.serializer.loads(request, self.cache.get(cache_url)) + + if not cached_response: + # we didn't have a cached response + return response + + # Lets update our headers with the headers from the new request: + # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 + # + # The server isn't supposed to send headers that would make + # the cached body invalid. But... just in case, we'll be sure + # to strip out ones we know that might be problmatic due to + # typical assumptions. + excluded_headers = ["content-length"] + + cached_response.headers.update( + dict( + (k, v) + for k, v in response.headers.items() + if k.lower() not in excluded_headers + ) + ) + + # we want a 200 b/c we have content via the cache + cached_response.status = 200 + + # update our cache + self.cache.set(cache_url, self.serializer.dumps(request, cached_response)) + + return cached_response diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/filewrapper.py new file mode 100644 index 0000000..30ed4c5 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/filewrapper.py @@ -0,0 +1,80 @@ +from io import BytesIO + + +class CallbackFileWrapper(object): + """ + Small wrapper around a fp object which will tee everything read into a + buffer, and when that file is closed it will execute a callback with the + contents of that buffer. + + All attributes are proxied to the underlying file object. + + This class uses members with a double underscore (__) leading prefix so as + not to accidentally shadow an attribute. + """ + + def __init__(self, fp, callback): + self.__buf = BytesIO() + self.__fp = fp + self.__callback = callback + + def __getattr__(self, name): + # The vaguaries of garbage collection means that self.__fp is + # not always set. By using __getattribute__ and the private + # name[0] allows looking up the attribute value and raising an + # AttributeError when it doesn't exist. This stop thigns from + # infinitely recursing calls to getattr in the case where + # self.__fp hasn't been set. + # + # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers + fp = self.__getattribute__("_CallbackFileWrapper__fp") + return getattr(fp, name) + + def __is_fp_closed(self): + try: + return self.__fp.fp is None + + except AttributeError: + pass + + try: + return self.__fp.closed + + except AttributeError: + pass + + # We just don't cache it then. + # TODO: Add some logging here... + return False + + def _close(self): + if self.__callback: + self.__callback(self.__buf.getvalue()) + + # We assign this to None here, because otherwise we can get into + # really tricky problems where the CPython interpreter dead locks + # because the callback is holding a reference to something which + # has a __del__ method. Setting this to None breaks the cycle + # and allows the garbage collector to do it's thing normally. + self.__callback = None + + def read(self, amt=None): + data = self.__fp.read(amt) + self.__buf.write(data) + if self.__is_fp_closed(): + self._close() + + return data + + def _safe_read(self, amt): + data = self.__fp._safe_read(amt) + if amt == 2 and data == b"\r\n": + # urllib executes this read to toss the CRLF at the end + # of the chunk. 
+ return data + + self.__buf.write(data) + if self.__is_fp_closed(): + self._close() + + return data diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/heuristics.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/heuristics.py new file mode 100644 index 0000000..6c0e979 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/heuristics.py @@ -0,0 +1,135 @@ +import calendar +import time + +from email.utils import formatdate, parsedate, parsedate_tz + +from datetime import datetime, timedelta + +TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" + + +def expire_after(delta, date=None): + date = date or datetime.utcnow() + return date + delta + + +def datetime_to_header(dt): + return formatdate(calendar.timegm(dt.timetuple())) + + +class BaseHeuristic(object): + + def warning(self, response): + """ + Return a valid 1xx warning header value describing the cache + adjustments. + + The response is provided too allow warnings like 113 + http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need + to explicitly say response is over 24 hours old. + """ + return '110 - "Response is Stale"' + + def update_headers(self, response): + """Update the response headers with any new headers. + + NOTE: This SHOULD always include some Warning header to + signify that the response was cached by the client, not + by way of the provided headers. + """ + return {} + + def apply(self, response): + updated_headers = self.update_headers(response) + + if updated_headers: + response.headers.update(updated_headers) + warning_header_value = self.warning(response) + if warning_header_value is not None: + response.headers.update({"Warning": warning_header_value}) + + return response + + +class OneDayCache(BaseHeuristic): + """ + Cache the response by providing an expires 1 day in the + future. + """ + + def update_headers(self, response): + headers = {} + + if "expires" not in response.headers: + date = parsedate(response.headers["date"]) + expires = expire_after(timedelta(days=1), date=datetime(*date[:6])) + headers["expires"] = datetime_to_header(expires) + headers["cache-control"] = "public" + return headers + + +class ExpiresAfter(BaseHeuristic): + """ + Cache **all** requests for a defined time period. + """ + + def __init__(self, **kw): + self.delta = timedelta(**kw) + + def update_headers(self, response): + expires = expire_after(self.delta) + return {"expires": datetime_to_header(expires), "cache-control": "public"} + + def warning(self, response): + tmpl = "110 - Automatically cached for %s. Response might be stale" + return tmpl % self.delta + + +class LastModified(BaseHeuristic): + """ + If there is no Expires header already, fall back on Last-Modified + using the heuristic from + http://tools.ietf.org/html/rfc7234#section-4.2.2 + to calculate a reasonable value. + + Firefox also does something like this per + https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ + http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 + Unlike mozilla we limit this to 24-hr. 
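# A sketch of forcing cacheability with the ExpiresAfter heuristic defined
# above, plugged into the adapter's `heuristic` parameter (scheme and
# duration are arbitrary examples):
from pip._vendor import requests
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.heuristics import ExpiresAfter

sess = requests.Session()
sess.mount("https://", CacheControlAdapter(heuristic=ExpiresAfter(hours=1)))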
+ """ + cacheable_by_default_statuses = { + 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + } + + def update_headers(self, resp): + headers = resp.headers + + if "expires" in headers: + return {} + + if "cache-control" in headers and headers["cache-control"] != "public": + return {} + + if resp.status not in self.cacheable_by_default_statuses: + return {} + + if "date" not in headers or "last-modified" not in headers: + return {} + + date = calendar.timegm(parsedate_tz(headers["date"])) + last_modified = parsedate(headers["last-modified"]) + if date is None or last_modified is None: + return {} + + now = time.time() + current_age = max(0, now - date) + delta = date - calendar.timegm(last_modified) + freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) + if freshness_lifetime <= current_age: + return {} + + expires = date + freshness_lifetime + return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} + + def warning(self, resp): + return None diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/serialize.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 0000000..3b6ec2d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -0,0 +1,188 @@ +import base64 +import io +import json +import zlib + +from pip._vendor import msgpack +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .compat import HTTPResponse, pickle, text_type + + +def _b64_decode_bytes(b): + return base64.b64decode(b.encode("ascii")) + + +def _b64_decode_str(s): + return _b64_decode_bytes(s).decode("utf8") + + +class Serializer(object): + + def dumps(self, request, response, body=None): + response_headers = CaseInsensitiveDict(response.headers) + + if body is None: + body = response.read(decode_content=False) + + # NOTE: 99% sure this is dead code. I'm only leaving it + # here b/c I don't have a test yet to prove + # it. Basically, before using + # `cachecontrol.filewrapper.CallbackFileWrapper`, + # this made an effort to reset the file handle. The + # `CallbackFileWrapper` short circuits this code by + # setting the body as the content is consumed, the + # result being a `body` argument is *always* passed + # into cache_response, and in turn, + # `Serializer.dump`. + response._fp = io.BytesIO(body) + + # NOTE: This is all a bit weird, but it's really important that on + # Python 2.x these objects are unicode and not str, even when + # they contain only ascii. The problem here is that msgpack + # understands the difference between unicode and bytes and we + # have it set to differentiate between them, however Python 2 + # doesn't know the difference. Forcing these to unicode will be + # enough to have msgpack know the difference. 
+ data = { + u"response": { + u"body": body, + u"headers": dict( + (text_type(k), text_type(v)) for k, v in response.headers.items() + ), + u"status": response.status, + u"version": response.version, + u"reason": text_type(response.reason), + u"strict": response.strict, + u"decode_content": response.decode_content, + } + } + + # Construct our vary headers + data[u"vary"] = {} + if u"vary" in response_headers: + varied_headers = response_headers[u"vary"].split(",") + for header in varied_headers: + header = text_type(header).strip() + header_value = request.headers.get(header, None) + if header_value is not None: + header_value = text_type(header_value) + data[u"vary"][header] = header_value + + return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) + + def loads(self, request, data): + # Short circuit if we've been given an empty set of data + if not data: + return + + # Determine what version of the serializer the data was serialized + # with + try: + ver, data = data.split(b",", 1) + except ValueError: + ver = b"cc=0" + + # Make sure that our "ver" is actually a version and isn't a false + # positive from a , being in the data stream. + if ver[:3] != b"cc=": + data = ver + data + ver = b"cc=0" + + # Get the version number out of the cc=N + ver = ver.split(b"=", 1)[-1].decode("ascii") + + # Dispatch to the actual load method for the given version + try: + return getattr(self, "_loads_v{}".format(ver))(request, data) + + except AttributeError: + # This is a version we don't have a loads function for, so we'll + # just treat it as a miss and return None + return + + def prepare_response(self, request, cached): + """Verify our vary headers match and construct a real urllib3 + HTTPResponse object. + """ + # Special case the '*' Vary value as it means we cannot actually + # determine if the cached response is suitable for this request. + # This case is also handled in the controller code when creating + # a cache entry, but is left here for backwards compatibility. + if "*" in cached.get("vary", {}): + return + + # Ensure that the Vary headers for the cached response match our + # request + for header, value in cached.get("vary", {}).items(): + if request.headers.get(header, None) != value: + return + + body_raw = cached["response"].pop("body") + + headers = CaseInsensitiveDict(data=cached["response"]["headers"]) + if headers.get("transfer-encoding", "") == "chunked": + headers.pop("transfer-encoding") + + cached["response"]["headers"] = headers + + try: + body = io.BytesIO(body_raw) + except TypeError: + # This can happen if cachecontrol serialized to v1 format (pickle) + # using Python 2. A Python 2 str(byte string) will be unpickled as + # a Python 3 str (unicode string), which will cause the above to + # fail with: + # + # TypeError: 'str' does not support the buffer interface + body = io.BytesIO(body_raw.encode("utf8")) + + return HTTPResponse(body=body, preload_content=False, **cached["response"]) + + def _loads_v0(self, request, data): + # The original legacy cache data. This doesn't contain enough + # information to construct everything we need, so we'll treat this as + # a miss. 
+ return + + def _loads_v1(self, request, data): + try: + cached = pickle.loads(data) + except ValueError: + return + + return self.prepare_response(request, cached) + + def _loads_v2(self, request, data): + try: + cached = json.loads(zlib.decompress(data).decode("utf8")) + except (ValueError, zlib.error): + return + + # We need to decode the items that we've base64 encoded + cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) + cached["response"]["headers"] = dict( + (_b64_decode_str(k), _b64_decode_str(v)) + for k, v in cached["response"]["headers"].items() + ) + cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) + cached["vary"] = dict( + (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) + for k, v in cached["vary"].items() + ) + + return self.prepare_response(request, cached) + + def _loads_v3(self, request, data): + # Due to Python 2 encoding issues, it's impossible to know for sure + # exactly how to load v3 entries, thus we'll treat these as a miss so + # that they get rewritten out as v4 entries. + return + + def _loads_v4(self, request, data): + try: + cached = msgpack.loads(data, raw=False) + except ValueError: + return + + return self.prepare_response(request, cached) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/wrapper.py b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 0000000..d8e6fc6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -0,0 +1,29 @@ +from .adapter import CacheControlAdapter +from .cache import DictCache + + +def CacheControl( + sess, + cache=None, + cache_etags=True, + serializer=None, + heuristic=None, + controller_class=None, + adapter_class=None, + cacheable_methods=None, +): + + cache = DictCache() if cache is None else cache + adapter_class = adapter_class or CacheControlAdapter + adapter = adapter_class( + cache, + cache_etags=cache_etags, + serializer=serializer, + heuristic=heuristic, + controller_class=controller_class, + cacheable_methods=cacheable_methods, + ) + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + return sess diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__init__.py new file mode 100644 index 0000000..eebdf88 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__init__.py @@ -0,0 +1,3 @@ +from .core import contents, where + +__version__ = "2021.05.30" diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__main__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__main__.py new file mode 100644 index 0000000..0037634 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from pip._vendor.certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..57bf197 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc new file mode 100644 index 0000000..86f7208 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc new file mode 100644 index 0000000..8707695 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/core.py b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/core.py new file mode 100644 index 0000000..f1a0b01 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/certifi/core.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import os + + +class _PipPatchedCertificate(Exception): + pass + + +try: + raise ImportError # force fallback + # Return a certificate file on disk for a standalone pip zipapp running in + # an isolated build environment to use. Passing --cert to the standalone + # pip does not work since requests calls where() unconditionally on import. + _PIP_STANDALONE_CERT = os.environ.get("_PIP_STANDALONE_CERT") + if _PIP_STANDALONE_CERT: + def where(): + return _PIP_STANDALONE_CERT + raise _PipPatchedCertificate() + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + +except _PipPatchedCertificate: + pass + +except ImportError: + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text(_module, _path, encoding="ascii"): + with open(where(), "r", encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
+ def where(): + return '/etc/pki/tls/certs/ca-bundle.crt' + + +def contents(): + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__init__.py new file mode 100644 index 0000000..80ad254 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__init__.py @@ -0,0 +1,83 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +from .universaldetector import UniversalDetector +from .enums import InputState +from .version import __version__, VERSION + + +__all__ = ['UniversalDetector', 'detect', 'detect_all', '__version__', 'VERSION'] + + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() + + +def detect_all(byte_str): + """ + Detect all the possible encodings of the given byte string. + + :param byte_str: The byte sequence to examine. 
+ :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + + detector = UniversalDetector() + detector.feed(byte_str) + detector.close() + + if detector._input_state == InputState.HIGH_BYTE: + results = [] + for prober in detector._charset_probers: + if prober.get_confidence() > detector.MINIMUM_THRESHOLD: + charset_name = prober.charset_name + lower_charset_name = prober.charset_name.lower() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if detector._has_win_bytes: + charset_name = detector.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + results.append({ + 'encoding': charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language, + }) + if len(results) > 0: + return sorted(results, key=lambda result: -result['confidence']) + + return [detector.result] diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..641d785 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc new file mode 100644 index 0000000..40b0194 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc new file mode 100644 index 0000000..441157a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc new file mode 100644 index 0000000..18780c4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc new file mode 100644 index 0000000..5a2973f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc new file mode 100644 index 0000000..794ce4f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc new file mode 100644 index 0000000..18722d8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc new file mode 100644 index 0000000..2e7d222 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc new file mode 100644 index 0000000..0b530f6 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc new file mode 100644 index 0000000..3c822c8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc new file mode 100644 index 0000000..688210c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc new file mode 100644 index 0000000..01ec754 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc new file mode 100644 index 0000000..4e0f1f7 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc new file mode 100644 index 0000000..3f617d9 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc new file mode 100644 index 0000000..b989e08 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc new file mode 100644 index 0000000..68f6703 Binary files /dev/null and 
b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc new file mode 100644 index 0000000..59b2043 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc new file mode 100644 index 0000000..144d98a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc new file mode 100644 index 0000000..83dd2b0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc new file mode 100644 index 0000000..64ce314 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc new file mode 100644 index 0000000..30869bc Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc new file mode 100644 index 0000000..1b6ad3e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc new file mode 100644 index 0000000..68810c1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc new file mode 100644 index 0000000..3c2fbfb Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc new file mode 100644 index 0000000..52b7b35 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc differ diff --git 
a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc new file mode 100644 index 0000000..bed1f99 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc new file mode 100644 index 0000000..e6c1c20 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc new file mode 100644 index 0000000..6a7cfa8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc new file mode 100644 index 0000000..f2cc133 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc new file mode 100644 index 0000000..6164ce0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc new file mode 100644 index 0000000..90bab4f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc new file mode 100644 index 0000000..ed12b14 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc new file mode 100644 index 0000000..d891859 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc new file mode 100644 index 0000000..175cab1 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc new file mode 100644 index 0000000..fe09e22 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc new file mode 100644 index 0000000..8ea152a Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc new file mode 100644 index 0000000..dd080da Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc new file mode 100644 index 0000000..3d85d52 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc new file mode 100644 index 0000000..3e1432d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5freq.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5freq.py new file mode 100644 index 0000000..38f3251 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 
768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 
882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 
452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 
498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5prober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
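The big5freq.py header above reduces its cumulative coverage figures to two reference numbers: the ideal distribution ratio for the 512 most frequent characters and the ratio that random data would produce. A short sketch of that arithmetic, using only constants quoted in the header (an illustrative snippet, not part of the vendored file); as the header notes, the 0.75 value of BIG5_TYPICAL_DISTRIBUTION_RATIO is roughly 25% of the ideal ratio:

# Sketch: reproduce the ratios quoted in the big5freq.py header above.
coverage_512 = 0.74851                            # text share covered by the 512 most frequent characters
ideal_ratio = coverage_512 / (1 - coverage_512)   # ~2.98, the "Ideal Distribution Ratio"
random_ratio = 512 / (5401 - 512)                 # ~0.105, what random byte pairs would give
typical_ratio = 0.25 * ideal_ratio                # ~0.75, matches BIG5_TYPICAL_DISTRIBUTION_RATIO
print(round(ideal_ratio, 2), round(random_ratio, 3))   # 2.98 0.105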
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/chardistribution.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
+ self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. + return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetgroupprober.py new file mode 100644 index 0000000..5812cef --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetgroupprober.py @@ -0,0 +1,107 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
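Each get_order method above maps a legal multi-byte sequence to a row in that language's frequency table, and feed() counts how many of those rows fall in the frequent band (order below 512). A minimal sketch of the Big5 case under Python 3, assuming the vendored package from this diff is importable as pip._vendor.chardet (an illustrative snippet, not part of the vendored files):

# Sketch: how Big5DistributionAnalysis turns byte pairs into frequency orders.
from pip._vendor.chardet.chardistribution import Big5DistributionAnalysis

analyser = Big5DistributionAnalysis()

# Same arithmetic as Big5DistributionAnalysis.get_order: lead byte 0xA4..0xFE,
# trail byte 0x40..0x7E or 0xA1..0xFE.
assert analyser.get_order(b'\xa4\x40') == 0          # 157*(0xA4-0xA4) + 0x40 - 0x40
assert analyser.get_order(b'\xa5\xa1') == 157 + 63   # the upper trail range adds the +63 offset
assert analyser.get_order(b'\x41\x41') == -1         # ASCII lead byte: not a Big5 character

# feed() only counts two-byte characters; confidence stays at SURE_NO until
# more than MINIMUM_DATA_THRESHOLD frequent characters have been seen.
analyser.feed(b'\xa4\x40', 2)
print(analyser.get_confidence())   # 0.01: still below MINIMUM_DATA_THRESHOLD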
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + self._state = ProbingState.FOUND_IT + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. 
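CharSetGroupProber above does no detection of its own: feed() forwards bytes to every active child prober, deactivates the ones that answer NOT_ME, and get_confidence() keeps whichever survivor reports the highest score. A self-contained sketch with deliberately trivial stand-in probers (FixedConfidenceProber and the 'toy-a'/'toy-b' names are hypothetical; only the vendored classes and ProbingState come from this diff):

# Sketch: CharSetGroupProber fans bytes out to its children and reports the
# best-scoring active prober.
from pip._vendor.chardet.charsetgroupprober import CharSetGroupProber
from pip._vendor.chardet.charsetprober import CharSetProber
from pip._vendor.chardet.enums import ProbingState

class FixedConfidenceProber(CharSetProber):
    """Hypothetical stand-in prober that always reports one confidence."""
    def __init__(self, name, confidence):
        super(FixedConfidenceProber, self).__init__()
        self._name = name
        self._confidence = confidence
        self.active = True

    @property
    def charset_name(self):
        return self._name

    @property
    def language(self):
        return None

    def feed(self, byte_str):
        return ProbingState.DETECTING

    def get_confidence(self):
        return self._confidence

group = CharSetGroupProber()
group.probers = [FixedConfidenceProber('toy-a', 0.2), FixedConfidenceProber('toy-b', 0.7)]
group.reset()
group.feed(b'some bytes')
print(group.charset_name, group.get_confidence())   # toy-b 0.7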
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. 
+ """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... + if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__init__.py @@ -0,0 +1 @@ + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..af26bc3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc new file mode 100644 index 0000000..7f37823 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/chardetect.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/chardetect.py new file mode 100644 index 0000000..6d6f93a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/chardetect.py @@ -0,0 +1,84 @@ +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from pip._vendor.chardet import __version__ +from pip._vendor.chardet.compat import PY2 +from pip._vendor.chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. 
+ if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{}: {} with confidence {}'.format(name, result['encoding'], + result['confidence']) + else: + return '{}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. \ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/codingstatemachine.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging + +from .enums import MachineState + + +class CodingStateMachine(object): + """ + A state machine to verify a byte sequence for a particular encoding. For + each byte the detector receives, it will feed that byte to every active + state machine available, one byte at a time. The state machine changes its + state based on its previous state and the byte it receives. There are 3 + states in a state machine that are of interest to an auto-detector: + + START state: This is the state to start with, or a legal byte sequence + (i.e. a valid code point) for character has been identified. 
+ + ME state: This indicates that the state machine identified a byte sequence + that is specific to the charset it is designed for and that + there is no other possible encoding which can contain this byte + sequence. This will to lead to an immediate positive answer for + the detector. + + ERROR state: This indicates the state machine identified an illegal byte + sequence for that encoding. This will lead to an immediate + negative answer for this encoding. Detector will exclude this + encoding from consideration from here on. + """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/compat.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/compat.py new file mode 100644 index 0000000..8941572 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/compat.py @@ -0,0 +1,36 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + string_types = (str, unicode) + text_type = unicode + iteritems = dict.iteritems +else: + PY2 = False + PY3 = True + string_types = (bytes, str) + text_type = str + iteritems = dict.items diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cp949prober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
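CodingStateMachine above advances one byte at a time: the byte is mapped to a class via class_table, and (current state * class_factor + class) indexes state_table to yield the next state. A self-contained sketch with a deliberately tiny, hypothetical model (TOY_SM_MODEL is invented for illustration; the real models live in the mbcssm.py and escsm.py modules that the probers in this diff import):

# Sketch: drive CodingStateMachine with a toy model in which any byte >= 0x80
# is illegal. The model keys mirror those used by the real models.
from pip._vendor.chardet.codingstatemachine import CodingStateMachine
from pip._vendor.chardet.enums import MachineState

TOY_SM_MODEL = {
    'class_table': [0] * 128 + [1] * 128,   # byte -> class: 0 = ASCII, 1 = high byte
    'class_factor': 2,                      # number of byte classes
    'state_table': [
        # current state START, classes 0 and 1:
        MachineState.START, MachineState.ERROR,
        # current state ERROR, classes 0 and 1:
        MachineState.ERROR, MachineState.ERROR,
    ],
    'char_len_table': (1, 1),               # each class starts a 1-byte character
    'name': 'toy',
    'language': '',
}

sm = CodingStateMachine(TOY_SM_MODEL)
for byte in b'abc\xff':
    state = sm.next_state(byte)
print(state == MachineState.ERROR)   # True: 0xFF is illegal in this toy model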
+# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/enums.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. + """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. 
+ """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/escprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. 
+ """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/escsm.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f 
+MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + 
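Reading the flattened ISO2022JP_ST table above is easier with a worked example. CodingStateMachine.next_state() computes index = curr_state * class_factor + byte_class (class_factor is 10 for this model, defined just below), so the JIS X 0208 designator ESC $ B walks the machine straight to ITS_ME; the classes and states here are read off ISO2022JP_CLS and ISO2022JP_ST, and the short sketch after the comments is illustrative only:

    # 0x1b (ESC): ISO2022JP_CLS[0x1b] == 1 -> index 0*10 + 1 = 1  -> ISO2022JP_ST[1]  == 3
    # 0x24 ('$'): ISO2022JP_CLS[0x24] == 7 -> index 3*10 + 7 = 37 -> ISO2022JP_ST[37] == 4
    # 0x42 ('B'): ISO2022JP_CLS[0x42] == 4 -> index 4*10 + 4 = 44 -> ISO2022JP_ST[44] == MachineState.ITS_ME
    from pip._vendor.chardet.codingstatemachine import CodingStateMachine
    from pip._vendor.chardet.enums import MachineState
    from pip._vendor.chardet.escsm import ISO2022JP_SM_MODEL

    sm = CodingStateMachine(ISO2022JP_SM_MODEL)
    states = [sm.next_state(b) for b in b'\x1b$B']   # START -> 3 -> 4 -> ITS_ME
    assert states[-1] == MachineState.ITS_ME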
+ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/eucjpprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrfreq.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
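Unlike the escape-sequence probers, EUC-JP has no distinctive escape bytes, so EUCJPProber's verdict is statistical: the state machine only validates that the byte stream is legal EUC-JP, while the confidence comes from character frequency (distribution) and character-pair context, combined with max() in get_confidence(). A rough usage sketch, assuming a hypothetical sample file of Japanese text (the analyzers come from chardistribution.py and jpcntx.py, not shown here):

    from pip._vendor.chardet.eucjpprober import EUCJPProber

    data = open('sample_euc_jp.txt', 'rb').read()  # hypothetical EUC-JP encoded sample
    prober = EUCJPProber()
    prober.feed(data)
    print(prober.charset_name)       # "EUC-JP"
    print(prober.get_confidence())   # max(context confidence, distribution confidence)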
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 
662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, +2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 
348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 
364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
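The "Idea Distribution Ratio" and "Random Distribution Ration" figures in the euckrfreq.py header above (read them as "Ideal" and "Ratio") are plain arithmetic over the sampling percentages quoted there, and EUCKR_CHAR_TO_FREQ_ORDER simply maps a character's table index to its frequency rank, so, roughly, ranks below 512 mark the frequently used hangul. A quick illustration with the numbers copied from those comments (the scoring code itself lives in chardistribution.py, not shown here):

    top_512_coverage = 0.98653                                # top 512 characters cover ~98.65% of sampled text
    ideal_ratio = top_512_coverage / (1 - top_512_coverage)   # ~= 73.24, frequent/rare ratio for real Korean text
    random_ratio = 512 / (2350 - 512)                         # ~= 0.279 for a uniform distribution
    # EUCKR_TYPICAL_DISTRIBUTION_RATIO (6.0) sits between the two extremes and is the
    # yardstick the distribution analysis scores observed text against.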
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwfreq.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/gb2312freq.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/gb2312prober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/hebrewprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). 
+# +# The dominant difference between these charsets is their directionality. +# "Visual" directionality means that the text is ordered as if the renderer is +# not aware of a BIDI rendering algorithm. The renderer sees the text and +# draws it from left to right. The text itself when ordered naturally is read +# backwards. A buffer of Visual Hebrew generally looks like so: +# "[last word of first line spelled backwards] [whole line ordered backwards +# and spelled backwards] [first word of first line spelled backwards] +# [end of line] [last word of second line] ... etc' " +# adding punctuation marks, numbers and English text to visual text is +# naturally also "visual" and from left to right. +# +# "Logical" directionality means the text is ordered "naturally" according to +# the order it is read. It is the responsibility of the renderer to display +# the text from right to left. A BIDI algorithm is used to place general +# punctuation marks, numbers and English text in the text. +# +# Texts in x-mac-hebrew are almost impossible to find on the Internet. From +# what little evidence I could find, it seems that its general directionality +# is Logical. +# +# To sum up all of the above, the Hebrew probing mechanism knows about two +# charsets: +# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are +# backwards while line order is natural. For charset recognition purposes +# the line order is unimportant (In fact, for this implementation, even +# word order is unimportant). +# Logical Hebrew - "windows-1255" - normal, naturally ordered text. +# +# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be +# specifically identified. +# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew +# that contain special punctuation marks or diacritics is displayed with +# some unconverted characters showing as question marks. This problem might +# be corrected using another model prober for x-mac-hebrew. Due to the fact +# that x-mac-hebrew texts are so rare, writing another model prober isn't +# worth the effort and performance hit. +# +#### The Prober #### +# +# The prober is divided between two SBCharSetProbers and a HebrewProber, +# all of which are managed, created, fed data, inquired and deleted by the +# SBCSGroupProber. The two SBCharSetProbers identify that the text is in +# fact some kind of Hebrew, Logical or Visual. The final decision about which +# one is it is made by the HebrewProber by combining final-letter scores +# with the scores of the two SBCharSetProbers to produce a final answer. +# +# The SBCSGroupProber is responsible for stripping the original text of HTML +# tags, English characters, numbers, low-ASCII punctuation characters, spaces +# and new lines. It reduces any sequence of such characters to a single space. +# The buffer fed to each prober in the SBCS group prober is pure text in +# high-ASCII. +# The two SBCharSetProbers (model probers) share the same language model: +# Win1255Model. +# The first SBCharSetProber uses the model normally as any other +# SBCharSetProber does, to recognize windows-1255, upon which this model was +# built. The second SBCharSetProber is told to make the pair-of-letter +# lookup in the language model backwards. This in practice exactly simulates +# a visual Hebrew model using the windows-1255 logical Hebrew model. +# +# The HebrewProber is not using any language model. All it does is look for +# final-letter evidence suggesting the text is either logical Hebrew or visual +# Hebrew. 
Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. 
This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. 
+ if (self._logical_prober.state == ProbingState.NOT_ME) and \ + (self._visual_prober.state == ProbingState.NOT_ME): + return ProbingState.NOT_ME + return ProbingState.DETECTING diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/jisfreq.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/jisfreq.py new file mode 100644 index 0000000..83fc082 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/jisfreq.py @@ -0,0 +1,325 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology +# +# Japanese frequency table, applied to both S-JIS and EUC-JP +# They are sorted in order. 
+ +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/jpcntx.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## 
BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), +(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
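+        # Concretely, the loop below advances one character at a time:
+        # get_order() returns (order, char_len) for the bytes at position i,
+        # where `order` is the character's index in the hiragana range (or -1
+        # for anything else) and `char_len` is how many bytes it occupies.
+        # Whenever two consecutive characters both have a known order, the
+        # counter for that pair's frequency category (looked up in
+        # jp2CharContext) is incremented.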
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..f5fcf83 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langbulgarianmodel.py @@ -0,0 +1,4649 @@ +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +BULGARIAN_LANG_MODEL = { + 63: { # 'e' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' 
+ 24: 1, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 45: { # '\xad' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 31: { # 'А' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 2, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 0, # 'и' + 26: 2, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 32: { # 'Б' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 2, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 35: { # 'В' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 2, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 
'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 43: { # 'Г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 37: { # 'Д' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 2, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 44: { # 'Е' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Х' + 53: 2, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 0, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 55: { # 'Ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' 
+ 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 47: { # 'З' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 40: { # 'И' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 2, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 3, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 0, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 59: { # 'Й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 33: { # 
'К' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 46: { # 'Л' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 2, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 38: { # 'М' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 36: { # 'Н' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 
4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 2, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 41: { # 'О' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 30: { # 'П' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 2, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 39: { # 'Р' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 2, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 1, # 'с' + 5: 0, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 28: { # 'С' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 3, # 'А' + 32: 2, # 'Б' + 35: 2, # 'В' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 
54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 34: { # 'Т' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 2, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 51: { # 'У' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 48: { # 'Ф' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 49: { # 'Х' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, 
# 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 53: { # 'Ц' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 50: { # 'Ч' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 54: { # 'Ш' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, 
# 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 57: { # 'Щ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 1, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 61: { # 'Ъ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Х' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 60: { # 'Ю' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 2, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 56: { # 'Я' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 1, # 'В' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, 
# 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 1: { # 'а' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 18: { # 'б' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 0, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 2, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 3, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 9: { # 'в' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 1, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 20: { # 'г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 
41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 11: { # 'д' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 1, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 3: { # 'е' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 2, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 23: { # 'ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 
62: 0, # '№' + }, + 15: { # 'з' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 2: { # 'и' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 1, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 26: { # 'й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 12: { # 'к' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 1, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 
2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 3, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 10: { # 'л' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 3, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 14: { # 'м' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 1, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 1, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 6: { # 'н' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 2, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 3, # 'ф' + 25: 2, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 4: { # 'о' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 
'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 2, # 'у' + 29: 3, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 13: { # 'п' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 7: { # 'р' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 3, # 'х' + 22: 3, # 'ц' + 21: 2, # 'ч' + 27: 3, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 1, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 8: { # 'с' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 5: { # 'т' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 
0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 3, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ъ' + 52: 2, # 'ь' + 42: 2, # 'ю' + 16: 3, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 19: { # 'у' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 2, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 2, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 2, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 29: { # 'ф' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 2, # 'у' + 29: 0, # 'ф' + 25: 1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 2, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 25: { # 'х' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'р' + 8: 1, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 
1, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 22: { # 'ц' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'р' + 8: 1, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 0, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 21: { # 'ч' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'р' + 8: 0, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 27: { # 'ш' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 2, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 2, # 'у' + 29: 1, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ъ' + 52: 1, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 24: { # 'щ' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 
1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'р' + 8: 0, # 'с' + 5: 2, # 'т' + 19: 3, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 2, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 17: { # 'ъ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 1, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 2, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 2, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 52: { # 'ь' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 1, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 1, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 1, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 42: { # 'ю' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 2, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 2, # 'р' + 8: 2, # 'с' + 5: 2, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 1, # 'х' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 16: { # 'я' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 
'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 1, # 'о' + 13: 2, # 'п' + 7: 2, # 'р' + 8: 3, # 'с' + 5: 3, # 'т' + 19: 1, # 'у' + 29: 1, # 'ф' + 25: 3, # 'х' + 22: 2, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 1, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 58: { # 'є' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, + 62: { # '№' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'А' + 32: 0, # 'Б' + 35: 0, # 'В' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Н' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Х' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'р' + 8: 0, # 'с' + 5: 0, # 'т' + 19: 0, # 'у' + 29: 0, # 'ф' + 25: 0, # 'х' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ъ' + 52: 0, # 'ь' + 42: 0, # 'ю' + 16: 0, # 'я' + 58: 0, # 'є' + 62: 0, # '№' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_5_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, 
# '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 77, # 'A' + 66: 90, # 'B' + 67: 99, # 'C' + 68: 100, # 'D' + 69: 72, # 'E' + 70: 109, # 'F' + 71: 107, # 'G' + 72: 101, # 'H' + 73: 79, # 'I' + 74: 185, # 'J' + 75: 81, # 'K' + 76: 102, # 'L' + 77: 76, # 'M' + 78: 94, # 'N' + 79: 82, # 'O' + 80: 110, # 'P' + 81: 186, # 'Q' + 82: 108, # 'R' + 83: 91, # 'S' + 84: 74, # 'T' + 85: 119, # 'U' + 86: 84, # 'V' + 87: 96, # 'W' + 88: 111, # 'X' + 89: 187, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 65, # 'a' + 98: 69, # 'b' + 99: 70, # 'c' + 100: 66, # 'd' + 101: 63, # 'e' + 102: 68, # 'f' + 103: 112, # 'g' + 104: 103, # 'h' + 105: 92, # 'i' + 106: 194, # 'j' + 107: 104, # 'k' + 108: 95, # 'l' + 109: 86, # 'm' + 110: 87, # 'n' + 111: 71, # 'o' + 112: 116, # 'p' + 113: 195, # 'q' + 114: 85, # 'r' + 115: 93, # 's' + 116: 97, # 't' + 117: 113, # 'u' + 118: 196, # 'v' + 119: 197, # 'w' + 120: 198, # 'x' + 121: 199, # 'y' + 122: 200, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 194, # '\x80' + 129: 195, # '\x81' + 130: 196, # '\x82' + 131: 197, # '\x83' + 132: 198, # '\x84' + 133: 199, # '\x85' + 134: 200, # '\x86' + 135: 201, # '\x87' + 136: 202, # '\x88' + 137: 203, # '\x89' + 138: 204, # '\x8a' + 139: 205, # '\x8b' + 140: 206, # '\x8c' + 141: 207, # '\x8d' + 142: 208, # '\x8e' + 143: 209, # '\x8f' + 144: 210, # '\x90' + 145: 211, # '\x91' + 146: 212, # '\x92' + 147: 213, # '\x93' + 148: 214, # '\x94' + 149: 215, # '\x95' + 150: 216, # '\x96' + 151: 217, # '\x97' + 152: 218, # '\x98' + 153: 219, # '\x99' + 154: 220, # '\x9a' + 155: 221, # '\x9b' + 156: 222, # '\x9c' + 157: 223, # '\x9d' + 158: 224, # '\x9e' + 159: 225, # '\x9f' + 160: 81, # '\xa0' + 161: 226, # 'Ё' + 162: 227, # 'Ђ' + 163: 228, # 'Ѓ' + 164: 229, # 'Є' + 165: 230, # 'Ѕ' + 166: 105, # 'І' + 167: 231, # 'Ї' + 168: 232, # 'Ј' + 169: 233, # 'Љ' + 170: 234, # 'Њ' + 171: 235, # 'Ћ' + 172: 236, # 'Ќ' + 173: 45, # '\xad' + 174: 237, # 'Ў' + 175: 238, # 'Џ' + 176: 31, # 'А' + 177: 32, # 'Б' + 178: 35, # 'В' + 179: 43, # 'Г' + 180: 37, # 'Д' + 181: 44, # 'Е' + 182: 55, # 'Ж' + 183: 47, # 'З' + 184: 40, # 'И' + 185: 59, # 'Й' + 186: 33, # 'К' + 187: 46, # 'Л' + 188: 38, # 'М' + 189: 36, # 'Н' + 190: 41, # 'О' + 191: 30, # 'П' + 192: 39, # 'Р' + 193: 28, # 'С' + 194: 34, # 'Т' + 195: 51, # 'У' + 196: 48, # 'Ф' + 197: 49, # 'Х' + 198: 53, # 'Ц' + 199: 50, # 'Ч' + 200: 54, # 'Ш' + 201: 57, # 'Щ' + 202: 61, # 'Ъ' + 203: 239, # 'Ы' + 204: 67, # 'Ь' + 205: 240, # 'Э' + 206: 60, # 'Ю' + 207: 56, # 'Я' + 208: 1, # 'а' + 209: 18, # 'б' + 210: 9, # 'в' + 211: 20, # 'г' + 212: 11, # 'д' + 213: 3, # 'е' + 214: 23, # 'ж' + 215: 15, # 'з' + 216: 2, # 'и' + 217: 26, # 'й' + 218: 12, # 'к' + 219: 10, # 'л' + 220: 14, # 'м' + 221: 6, # 'н' + 222: 4, # 'о' + 223: 13, # 'п' + 224: 7, # 'р' + 225: 8, # 'с' + 226: 5, # 'т' + 227: 19, # 'у' + 228: 29, # 'ф' + 229: 25, # 'х' + 230: 22, # 'ц' + 231: 21, # 'ч' + 232: 27, # 'ш' + 233: 24, # 'щ' + 
234: 17, # 'ъ' + 235: 75, # 'ы' + 236: 52, # 'ь' + 237: 241, # 'э' + 238: 42, # 'ю' + 239: 16, # 'я' + 240: 62, # '№' + 241: 242, # 'ё' + 242: 243, # 'ђ' + 243: 244, # 'ѓ' + 244: 58, # 'є' + 245: 245, # 'ѕ' + 246: 98, # 'і' + 247: 246, # 'ї' + 248: 247, # 'ј' + 249: 248, # 'љ' + 250: 249, # 'њ' + 251: 250, # 'ћ' + 252: 251, # 'ќ' + 253: 91, # '§' + 254: 252, # 'ў' + 255: 253, # 'џ' +} + +ISO_8859_5_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', + language='Bulgarian', + char_to_order_map=ISO_8859_5_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet='АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя') + +WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 77, # 'A' + 66: 90, # 'B' + 67: 99, # 'C' + 68: 100, # 'D' + 69: 72, # 'E' + 70: 109, # 'F' + 71: 107, # 'G' + 72: 101, # 'H' + 73: 79, # 'I' + 74: 185, # 'J' + 75: 81, # 'K' + 76: 102, # 'L' + 77: 76, # 'M' + 78: 94, # 'N' + 79: 82, # 'O' + 80: 110, # 'P' + 81: 186, # 'Q' + 82: 108, # 'R' + 83: 91, # 'S' + 84: 74, # 'T' + 85: 119, # 'U' + 86: 84, # 'V' + 87: 96, # 'W' + 88: 111, # 'X' + 89: 187, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 65, # 'a' + 98: 69, # 'b' + 99: 70, # 'c' + 100: 66, # 'd' + 101: 63, # 'e' + 102: 68, # 'f' + 103: 112, # 'g' + 104: 103, # 'h' + 105: 92, # 'i' + 106: 194, # 'j' + 107: 104, # 'k' + 108: 95, # 'l' + 109: 86, # 'm' + 110: 87, # 'n' + 111: 71, # 'o' + 112: 116, # 'p' + 113: 195, # 'q' + 114: 85, # 'r' + 115: 93, # 's' + 116: 97, # 't' + 117: 113, # 'u' + 118: 196, # 'v' + 119: 197, # 'w' + 120: 198, # 'x' + 121: 199, # 'y' + 122: 200, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 206, # 'Ђ' + 129: 207, # 'Ѓ' + 130: 208, # '‚' + 131: 209, # 'ѓ' + 132: 210, # '„' + 133: 211, # '…' + 134: 212, # '†' + 135: 213, # '‡' + 136: 120, # '€' + 137: 214, # '‰' + 138: 215, # 'Љ' + 139: 216, # '‹' + 140: 217, # 'Њ' + 141: 218, # 'Ќ' + 142: 219, # 'Ћ' + 143: 220, # 'Џ' + 144: 221, # 'ђ' + 145: 78, # '‘' + 146: 64, # '’' + 147: 83, # '“' + 148: 121, # '”' + 149: 98, # '•' + 150: 117, # '–' + 151: 105, # '—' + 152: 222, # None + 153: 223, # '™' + 154: 224, # 'љ' + 155: 225, # '›' + 156: 226, # 'њ' + 157: 227, # 'ќ' + 158: 228, # 'ћ' + 159: 229, # 'џ' + 160: 88, # '\xa0' + 161: 230, # 'Ў' + 162: 231, # 'ў' + 163: 232, # 'Ј' + 164: 233, # '¤' + 165: 122, # 'Ґ' + 166: 89, # '¦' + 167: 106, # '§' + 168: 234, # 'Ё' + 169: 235, # '©' + 170: 236, # 'Є' + 171: 237, # '«' + 172: 238, # '¬' + 173: 45, # '\xad' + 174: 239, # '®' + 175: 240, # 'Ї' + 176: 73, # '°' + 177: 80, # '±' + 178: 118, # 'І' + 179: 114, # 'і' + 180: 241, # 'ґ' + 181: 242, # 'µ' + 182: 243, # '¶' + 183: 244, # '·' + 184: 245, # 'ё' + 185: 62, # '№' + 186: 58, # 'є' + 187: 246, # '»' + 188: 247, # 'ј' + 189: 248, # 'Ѕ' + 190: 249, # 'ѕ' + 191: 250, # 'ї' + 192: 31, # 'А' + 193: 32, # 'Б' + 194: 35, # 'В' + 195: 43, # 'Г' + 196: 37, # 'Д' + 197: 44, # 'Е' + 198: 55, # 'Ж' + 199: 47, # 'З' + 200: 40, # 'И' + 201: 59, # 'Й' + 202: 33, # 'К' + 203: 46, # 'Л' + 204: 38, # 'М' + 205: 36, # 'Н' + 206: 41, # 'О' + 207: 30, # 'П' + 208: 39, # 'Р' + 209: 28, # 'С' + 210: 34, # 'Т' + 211: 51, # 'У' + 212: 48, # 'Ф' + 213: 49, # 'Х' + 214: 53, # 'Ц' + 215: 50, # 'Ч' + 216: 54, # 'Ш' + 217: 57, # 'Щ' + 218: 61, # 'Ъ' + 219: 251, # 'Ы' + 220: 67, # 'Ь' + 221: 252, # 'Э' + 222: 60, # 'Ю' + 223: 56, # 'Я' + 224: 1, # 'а' + 225: 18, # 'б' + 226: 9, # 'в' + 227: 20, # 'г' + 228: 11, # 'д' + 229: 3, # 'е' + 230: 23, # 'ж' + 231: 15, # 'з' + 232: 2, # 'и' + 233: 26, # 'й' + 234: 12, # 'к' + 235: 10, # 'л' + 236: 14, # 'м' + 237: 6, # 'н' + 238: 4, # 'о' + 239: 13, # 'п' + 240: 7, # 'р' + 241: 8, # 'с' + 242: 5, # 'т' + 243: 19, # 'у' + 244: 29, # 'ф' + 245: 25, # 'х' + 246: 22, # 'ц' + 247: 21, # 'ч' + 248: 27, # 'ш' + 249: 24, # 'щ' + 250: 17, # 'ъ' + 251: 75, # 'ы' + 252: 52, # 'ь' + 253: 253, # 'э' + 254: 42, # 'ю' + 255: 16, # 'я' +} + +WINDOWS_1251_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', + language='Bulgarian', + char_to_order_map=WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + 
typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet='АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя') + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langgreekmodel.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langgreekmodel.py new file mode 100644 index 0000000..e120745 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langgreekmodel.py @@ -0,0 +1,4397 @@ +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +GREEK_LANG_MODEL = { + 60: { # 'e' + 60: 2, # 'e' + 55: 1, # 'o' + 58: 2, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 55: { # 'o' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 2, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 58: { # 't' + 60: 2, # 'e' + 55: 1, # 'o' + 58: 1, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 1, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 36: { # '·' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, 
# 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 61: { # 'Ά' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 1, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 1, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 46: { # 'Έ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 1, # 'σ' + 2: 2, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 54: { # 'Ό' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + 
}, + 31: { # 'Α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 2, # 'Β' + 43: 2, # 'Γ' + 41: 1, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 2, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 1, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 2, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 1, # 'θ' + 5: 0, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 2, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 51: { # 'Β' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 1, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 43: { # 'Γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 1, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 41: { # 'Δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 
0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 34: { # 'Ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 2, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 1, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 2, # 'Χ' + 57: 2, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 1, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 1, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 1, # 'ύ' + 27: 0, # 'ώ' + }, + 40: { # 'Η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 2, # 'Θ' + 47: 0, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 52: { # 'Θ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 1, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 47: { # 'Ι' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 1, # 'Β' + 43: 1, # 'Γ' + 41: 2, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 2, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 
0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 1, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 1, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 44: { # 'Κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 1, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 1, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 53: { # 'Λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 2, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 2, # 'Σ' + 33: 0, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 1, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 38: { # 'Μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 2, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 2, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 49: { # 'Ν' + 60: 2, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 
0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 1, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 1, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 59: { # 'Ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 39: { # 'Ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 1, # 'Β' + 43: 2, # 'Γ' + 41: 2, # 'Δ' + 34: 2, # 'Ε' + 40: 1, # 'Η' + 52: 2, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 2, # 'Φ' + 50: 2, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 35: { # 'Π' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 2, # 'Λ' + 38: 1, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 1, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 3, # 'ώ' + }, + 48: { # 'Ρ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 
'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 1, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 1, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 37: { # 'Σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Υ' + 56: 0, # 'Φ' + 50: 2, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 2, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 2, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 33: { # 'Τ' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 2, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 1, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 2, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 45: { # 'Υ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 2, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 2, # 'Η' + 52: 2, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 1, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 56: { # 'Φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' 
+ 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 1, # 'ύ' + 27: 1, # 'ώ' + }, + 50: { # 'Χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 1, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 1, # 'Ν' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 1, # 'Ω' + 17: 2, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 57: { # 'Ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 1, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 2, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 2, # 'ρ' + 14: 2, # 'ς' + 7: 2, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 17: { # 'ά' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 18: { # 'έ' + 60: 0, # 'e' + 55: 0, # 'o' + 
58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 22: { # 'ή' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 15: { # 'ί' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 1, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 1: { # 'α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 3, # 'ζ' + 13: 1, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 
'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 0, # 'ώ' + }, + 29: { # 'β' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 20: { # 'γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 21: { # 'δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 3: { # 'ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 
'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 32: { # 'ζ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 2, # 'ώ' + }, + 13: { # 'η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 25: { # 'θ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 5: { # 'ι' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' 
+ 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 0, # 'ύ' + 27: 3, # 'ώ' + }, + 11: { # 'κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 16: { # 'λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 1, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 10: { # 'μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 2, # 'υ' + 28: 3, # 'φ' + 23: 0, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 6: { # 'ν' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 
45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 30: { # 'ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 2, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ό' + 26: 3, # 'ύ' + 27: 1, # 'ώ' + }, + 4: { # 'ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 9: { # 'π' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 2, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 3, # 'ώ' + }, + 8: { # 'ρ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, 
# 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'π' + 8: 2, # 'ρ' + 14: 0, # 'ς' + 7: 2, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 14: { # 'ς' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 0, # 'τ' + 12: 0, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 7: { # 'σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 2, # 'ώ' + }, + 2: { # 'τ' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 12: { # 'υ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' 
+ 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ό' + 26: 0, # 'ύ' + 27: 2, # 'ώ' + }, + 28: { # 'φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 1, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 2, # 'ύ' + 27: 2, # 'ώ' + }, + 23: { # 'χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'π' + 8: 3, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 3, # 'τ' + 12: 3, # 'υ' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ό' + 26: 3, # 'ύ' + 27: 3, # 'ώ' + }, + 42: { # 'ψ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'π' + 8: 0, # 'ρ' + 14: 0, # 'ς' + 7: 0, # 'σ' + 2: 2, # 'τ' + 12: 1, # 'υ' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, 
# 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 24: { # 'ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 1, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 19: { # 'ό' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 1, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 26: { # 'ύ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, + 27: { # 'ώ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'Ό' + 31: 0, # 'Α' + 51: 0, # 'Β' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Ν' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Υ' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 1, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 1, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 
'π' + 8: 3, # 'ρ' + 14: 3, # 'ς' + 7: 3, # 'σ' + 2: 3, # 'τ' + 12: 0, # 'υ' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ό' + 26: 0, # 'ύ' + 27: 0, # 'ώ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1253_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 82, # 'A' + 66: 100, # 'B' + 67: 104, # 'C' + 68: 94, # 'D' + 69: 98, # 'E' + 70: 101, # 'F' + 71: 116, # 'G' + 72: 102, # 'H' + 73: 111, # 'I' + 74: 187, # 'J' + 75: 117, # 'K' + 76: 92, # 'L' + 77: 88, # 'M' + 78: 113, # 'N' + 79: 85, # 'O' + 80: 79, # 'P' + 81: 118, # 'Q' + 82: 105, # 'R' + 83: 83, # 'S' + 84: 67, # 'T' + 85: 114, # 'U' + 86: 119, # 'V' + 87: 95, # 'W' + 88: 99, # 'X' + 89: 109, # 'Y' + 90: 188, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 72, # 'a' + 98: 70, # 'b' + 99: 80, # 'c' + 100: 81, # 'd' + 101: 60, # 'e' + 102: 96, # 'f' + 103: 93, # 'g' + 104: 89, # 'h' + 105: 68, # 'i' + 106: 120, # 'j' + 107: 97, # 'k' + 108: 77, # 'l' + 109: 86, # 'm' + 110: 69, # 'n' + 111: 55, # 'o' + 112: 78, # 'p' + 113: 115, # 'q' + 114: 65, # 'r' + 115: 66, # 's' + 116: 58, # 't' + 117: 76, # 'u' + 118: 106, # 'v' + 119: 103, # 'w' + 120: 87, # 'x' + 121: 107, # 'y' + 122: 112, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 255, # '€' + 129: 255, # None + 130: 255, # '‚' + 131: 255, # 'ƒ' + 132: 255, # '„' + 133: 255, # '…' + 134: 255, # '†' + 135: 255, # '‡' + 136: 255, # None + 137: 255, # '‰' + 138: 255, # None + 139: 255, # '‹' + 140: 255, # None + 141: 255, # None + 142: 255, # None + 143: 255, # None + 144: 255, # None + 145: 255, # '‘' + 146: 255, # '’' + 147: 255, # '“' + 148: 255, # '”' + 149: 255, # '•' + 150: 255, # '–' + 151: 255, # '—' + 152: 255, # None + 153: 255, # '™' + 154: 255, # None + 155: 255, # '›' + 156: 255, # None + 157: 255, # None + 158: 255, # None + 159: 255, # None + 160: 253, # '\xa0' + 161: 233, # '΅' + 162: 61, # 'Ά' + 163: 253, # '£' + 164: 253, # '¤' + 165: 253, # '¥' + 166: 253, # '¦' + 167: 253, # '§' + 168: 253, # '¨' + 169: 253, # '©' + 170: 253, # None + 171: 253, # '«' + 172: 253, # '¬' + 173: 74, # '\xad' 
+ 174: 253, # '®' + 175: 253, # '―' + 176: 253, # '°' + 177: 253, # '±' + 178: 253, # '²' + 179: 253, # '³' + 180: 247, # '΄' + 181: 253, # 'µ' + 182: 253, # '¶' + 183: 36, # '·' + 184: 46, # 'Έ' + 185: 71, # 'Ή' + 186: 73, # 'Ί' + 187: 253, # '»' + 188: 54, # 'Ό' + 189: 253, # '½' + 190: 108, # 'Ύ' + 191: 123, # 'Ώ' + 192: 110, # 'ΐ' + 193: 31, # 'Α' + 194: 51, # 'Β' + 195: 43, # 'Γ' + 196: 41, # 'Δ' + 197: 34, # 'Ε' + 198: 91, # 'Ζ' + 199: 40, # 'Η' + 200: 52, # 'Θ' + 201: 47, # 'Ι' + 202: 44, # 'Κ' + 203: 53, # 'Λ' + 204: 38, # 'Μ' + 205: 49, # 'Ν' + 206: 59, # 'Ξ' + 207: 39, # 'Ο' + 208: 35, # 'Π' + 209: 48, # 'Ρ' + 210: 250, # None + 211: 37, # 'Σ' + 212: 33, # 'Τ' + 213: 45, # 'Υ' + 214: 56, # 'Φ' + 215: 50, # 'Χ' + 216: 84, # 'Ψ' + 217: 57, # 'Ω' + 218: 120, # 'Ϊ' + 219: 121, # 'Ϋ' + 220: 17, # 'ά' + 221: 18, # 'έ' + 222: 22, # 'ή' + 223: 15, # 'ί' + 224: 124, # 'ΰ' + 225: 1, # 'α' + 226: 29, # 'β' + 227: 20, # 'γ' + 228: 21, # 'δ' + 229: 3, # 'ε' + 230: 32, # 'ζ' + 231: 13, # 'η' + 232: 25, # 'θ' + 233: 5, # 'ι' + 234: 11, # 'κ' + 235: 16, # 'λ' + 236: 10, # 'μ' + 237: 6, # 'ν' + 238: 30, # 'ξ' + 239: 4, # 'ο' + 240: 9, # 'π' + 241: 8, # 'ρ' + 242: 14, # 'ς' + 243: 7, # 'σ' + 244: 2, # 'τ' + 245: 12, # 'υ' + 246: 28, # 'φ' + 247: 23, # 'χ' + 248: 42, # 'ψ' + 249: 24, # 'ω' + 250: 64, # 'ϊ' + 251: 75, # 'ϋ' + 252: 19, # 'ό' + 253: 26, # 'ύ' + 254: 27, # 'ώ' + 255: 253, # None +} + +WINDOWS_1253_GREEK_MODEL = SingleByteCharSetModel(charset_name='windows-1253', + language='Greek', + char_to_order_map=WINDOWS_1253_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet='ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ') + +ISO_8859_7_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 82, # 'A' + 66: 100, # 'B' + 67: 104, # 'C' + 68: 94, # 'D' + 69: 98, # 'E' + 70: 101, # 'F' + 71: 116, # 'G' + 72: 102, # 'H' + 73: 111, # 'I' + 74: 187, # 'J' + 75: 117, # 'K' + 76: 92, # 'L' + 77: 88, # 'M' + 78: 113, # 'N' + 79: 85, # 'O' + 80: 79, # 'P' + 81: 118, # 'Q' + 82: 105, # 'R' + 83: 83, # 'S' + 84: 67, # 'T' + 85: 114, # 'U' + 86: 119, # 'V' + 87: 95, # 'W' + 88: 99, # 'X' + 89: 109, # 'Y' + 90: 188, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 72, # 'a' + 98: 70, # 'b' + 99: 80, # 'c' + 100: 81, # 'd' + 101: 60, # 'e' + 102: 96, # 'f' + 103: 93, # 'g' + 104: 89, # 'h' + 105: 68, # 'i' + 106: 120, # 'j' + 107: 97, # 'k' + 108: 77, # 'l' + 109: 86, # 'm' + 110: 69, # 'n' + 111: 55, # 'o' + 112: 78, # 'p' + 113: 115, # 'q' + 114: 65, # 'r' + 115: 66, # 's' + 116: 58, # 't' + 117: 76, # 'u' + 118: 106, # 'v' + 119: 103, # 'w' + 120: 87, # 'x' + 121: 107, # 'y' + 122: 112, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 255, # '\x80' + 129: 255, # '\x81' + 130: 255, # '\x82' + 131: 255, # '\x83' + 132: 255, # '\x84' + 133: 255, # '\x85' + 134: 255, # '\x86' + 135: 255, # '\x87' + 136: 255, # '\x88' + 137: 255, # '\x89' + 138: 255, # '\x8a' + 139: 255, # '\x8b' + 140: 255, # '\x8c' + 141: 255, # '\x8d' + 142: 255, # '\x8e' + 143: 255, # '\x8f' + 144: 255, # '\x90' + 145: 255, # '\x91' + 146: 255, # '\x92' + 147: 255, # '\x93' + 148: 255, # '\x94' + 149: 255, # '\x95' + 150: 255, # '\x96' + 151: 255, # '\x97' + 152: 255, # '\x98' + 153: 255, # '\x99' + 154: 255, # '\x9a' + 155: 255, # '\x9b' + 156: 255, # '\x9c' + 157: 255, # '\x9d' + 158: 255, # '\x9e' + 159: 255, # '\x9f' + 160: 253, # '\xa0' + 161: 233, # '‘' + 162: 90, # '’' + 163: 253, # '£' + 164: 253, # '€' + 165: 253, # '₯' + 166: 253, # '¦' + 167: 253, # '§' + 168: 253, # '¨' + 169: 253, # '©' + 170: 253, # 'ͺ' + 171: 253, # '«' + 172: 253, # '¬' + 173: 74, # '\xad' + 174: 253, # None + 175: 253, # '―' + 176: 253, # '°' + 177: 253, # '±' + 178: 253, # '²' + 179: 253, # '³' + 180: 247, # '΄' + 181: 248, # '΅' + 182: 61, # 'Ά' + 183: 36, # '·' + 184: 46, # 'Έ' + 185: 71, # 'Ή' + 186: 73, # 'Ί' + 187: 253, # '»' + 188: 54, # 'Ό' + 189: 253, # '½' + 190: 108, # 'Ύ' + 191: 123, # 'Ώ' + 192: 110, # 'ΐ' + 193: 31, # 'Α' + 194: 51, # 'Β' + 195: 43, # 'Γ' + 196: 41, # 'Δ' + 197: 34, # 'Ε' + 198: 91, # 'Ζ' + 199: 40, # 'Η' + 200: 52, # 'Θ' + 201: 47, # 'Ι' + 202: 44, # 'Κ' + 203: 53, # 'Λ' + 204: 38, # 'Μ' + 205: 49, # 'Ν' + 206: 59, # 'Ξ' + 207: 39, # 'Ο' + 208: 35, # 'Π' + 209: 48, # 'Ρ' + 210: 250, # None + 211: 37, # 'Σ' + 212: 33, # 'Τ' + 213: 45, # 'Υ' + 214: 56, # 'Φ' + 215: 50, # 'Χ' + 216: 84, # 'Ψ' + 217: 57, # 'Ω' + 218: 120, # 'Ϊ' + 219: 121, # 'Ϋ' + 220: 17, # 'ά' + 221: 18, # 'έ' + 222: 22, # 'ή' + 223: 15, # 'ί' + 224: 124, # 'ΰ' + 225: 1, # 'α' + 226: 29, # 'β' + 227: 20, # 'γ' + 228: 21, # 'δ' + 229: 3, # 'ε' + 230: 32, # 'ζ' + 231: 13, # 'η' + 232: 25, # 'θ' + 233: 5, # 'ι' + 234: 11, # 'κ' + 235: 16, # 'λ' + 236: 10, # 'μ' + 237: 6, # 'ν' + 238: 30, # 'ξ' + 239: 4, # 'ο' + 240: 9, # 'π' + 241: 8, # 'ρ' + 242: 14, # 'ς' + 243: 7, # 'σ' + 244: 2, # 'τ' + 245: 12, # 'υ' + 246: 28, # 'φ' + 247: 23, # 'χ' + 248: 42, # 'ψ' + 249: 24, # 'ω' + 250: 64, # 'ϊ' + 251: 75, # 'ϋ' + 252: 19, # 'ό' + 253: 26, # 'ύ' + 254: 27, # 'ώ' + 255: 253, # None +} + +ISO_8859_7_GREEK_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-7', + language='Greek', + 
char_to_order_map=ISO_8859_7_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet='ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ') + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langhebrewmodel.py new file mode 100644 index 0000000..2a19e61 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langhebrewmodel.py @@ -0,0 +1,4382 @@ +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +HEBREW_LANG_MODEL = { + 50: { # 'a' + 50: 0, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 2, # 'l' + 54: 2, # 'n' + 49: 0, # 'o' + 51: 2, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 60: { # 'c' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 61: { # 'd' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 0, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' 
+ 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 42: { # 'e' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 2, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 2, # 'l' + 54: 2, # 'n' + 49: 1, # 'o' + 51: 2, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 1, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 53: { # 'i' + 50: 1, # 'a' + 60: 2, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 0, # 'i' + 56: 1, # 'l' + 54: 2, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 56: { # 'l' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 2, # 'e' + 53: 2, # 'i' + 56: 2, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 54: { # 'n' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 
15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 49: { # 'o' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 2, # 'n' + 49: 1, # 'o' + 51: 2, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 51: { # 'r' + 50: 2, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 2, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 43: { # 's' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 2, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 2, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 44: { # 't' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 2, # 'e' + 53: 2, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 
36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 63: { # 'u' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 34: { # '\xa0' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 2, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 55: { # '´' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 2, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 48: { # '¼' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, 
# 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 39: { # '½' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 57: { # '¾' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 30: { # 'ְ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 2, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 
26: 0, # 'ף' + 18: 2, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 59: { # 'ֱ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 1, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 0, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 41: { # 'ֲ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 0, # 'ם' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 33: { # 'ִ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 2, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 37: { # 'ֵ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 
9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 1, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 36: { # 'ֶ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 1, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 2, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 31: { # 'ַ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 1, # 'ֶ' + 31: 0, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 29: { # 'ָ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 2, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 35: { # 'ֹ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 
'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 2, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 2, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 62: { # 'ֻ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 28: { # 'ּ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 33: 3, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 3, # 'ַ' + 29: 3, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 2, # 'ׁ' + 45: 1, # 'ׂ' + 9: 2, # 'א' + 8: 2, # 'ב' + 20: 1, # 'ג' + 16: 2, # 'ד' + 3: 1, # 'ה' + 2: 2, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 2, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 2, # 'ל' + 11: 1, # 'ם' + 6: 2, # 'מ' + 23: 1, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 38: { # 'ׁ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 2, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 
0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 45: { # 'ׂ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 2, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 9: { # 'א' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 2, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 8: { # 'ב' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 1, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 20: { # 'ג' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 0, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 2, # 'ח' + 22: 2, # 
'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 16: { # 'ד' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 3: { # 'ה' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 3, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 0, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 2: { # 'ו' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 3, # 'ֹ' + 62: 0, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 24: { # 'ז' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 
33: 1, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 2, # 'ב' + 20: 2, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 2, # 'ח' + 22: 1, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 2, # 'נ' + 19: 1, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 14: { # 'ח' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 2, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 1, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 22: { # 'ט' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 1, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 2, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 1: { # 'י' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 25: { # 'ך' + 50: 0, # 'a' + 60: 0, # 
'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 2, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 1, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 15: { # 'כ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 3, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 4: { # 'ל' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 11: { # 'ם' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 
0, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 1, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 6: { # 'מ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 0, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 23: { # 'ן' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 1, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 12: { # 'נ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 19: { # 'ס' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 2, # 'ֻ' + 28: 2, # 'ּ' + 38: 
0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 3, # 'ף' + 18: 3, # 'פ' + 27: 0, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 13: { # 'ע' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 1, # 'ֱ' + 41: 2, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 1, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 2, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 2, # 'ע' + 26: 1, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 26: { # 'ף' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 1, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 18: { # 'פ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 1, # 'ֵ' + 36: 2, # 'ֶ' + 31: 1, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 2, # 'ב' + 20: 3, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 2, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 2, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 27: { # 'ץ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 
44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 21: { # 'צ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 1, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 1, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 0, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 17: { # 'ק' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 1, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 2, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 1, # 'ך' + 15: 1, # 'כ' + 4: 3, # 'ל' + 11: 2, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 2, # 'ץ' + 21: 3, # 'צ' + 17: 2, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 7: { # 'ר' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 1, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 2, # 'ֹ' + 62: 1, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 3, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 3, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 3, # 'ץ' + 21: 3, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 
'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 10: { # 'ש' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 1, # 'ִ' + 37: 1, # 'ֵ' + 36: 1, # 'ֶ' + 31: 1, # 'ַ' + 29: 1, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 3, # 'ׁ' + 45: 2, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 3, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 3, # 'ט' + 1: 3, # 'י' + 25: 3, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 1, # '…' + }, + 5: { # 'ת' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 1, # '½' + 57: 0, # '¾' + 30: 2, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 2, # 'ִ' + 37: 2, # 'ֵ' + 36: 2, # 'ֶ' + 31: 2, # 'ַ' + 29: 2, # 'ָ' + 35: 1, # 'ֹ' + 62: 1, # 'ֻ' + 28: 2, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 3, # 'א' + 8: 3, # 'ב' + 20: 3, # 'ג' + 16: 2, # 'ד' + 3: 3, # 'ה' + 2: 3, # 'ו' + 24: 2, # 'ז' + 14: 3, # 'ח' + 22: 2, # 'ט' + 1: 3, # 'י' + 25: 2, # 'ך' + 15: 3, # 'כ' + 4: 3, # 'ל' + 11: 3, # 'ם' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # 'נ' + 19: 2, # 'ס' + 13: 3, # 'ע' + 26: 2, # 'ף' + 18: 3, # 'פ' + 27: 1, # 'ץ' + 21: 2, # 'צ' + 17: 3, # 'ק' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, + 32: { # '–' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' + 14: 1, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 52: { # '’' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 1, # 'ו' + 24: 0, # 'ז' 
+ 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 47: { # '“' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 2, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 1, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 1, # 'ח' + 22: 1, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 1, # 'ס' + 13: 1, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 1, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 46: { # '”' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 1, # 'ב' + 20: 1, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 1, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 0, # '†' + 40: 0, # '…' + }, + 58: { # '†' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 0, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 0, # 'ה' + 2: 0, # 'ו' + 24: 0, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 0, # 'י' + 25: 0, # 'ך' + 15: 0, # 'כ' + 4: 0, # 'ל' + 11: 0, # 'ם' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 0, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # '”' + 58: 2, # '†' + 40: 0, # '…' + }, + 40: { # '…' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'ְ' + 
59: 0, # 'ֱ' + 41: 0, # 'ֲ' + 33: 0, # 'ִ' + 37: 0, # 'ֵ' + 36: 0, # 'ֶ' + 31: 0, # 'ַ' + 29: 0, # 'ָ' + 35: 0, # 'ֹ' + 62: 0, # 'ֻ' + 28: 0, # 'ּ' + 38: 0, # 'ׁ' + 45: 0, # 'ׂ' + 9: 1, # 'א' + 8: 0, # 'ב' + 20: 0, # 'ג' + 16: 0, # 'ד' + 3: 1, # 'ה' + 2: 1, # 'ו' + 24: 1, # 'ז' + 14: 0, # 'ח' + 22: 0, # 'ט' + 1: 1, # 'י' + 25: 0, # 'ך' + 15: 1, # 'כ' + 4: 1, # 'ל' + 11: 0, # 'ם' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # 'נ' + 19: 0, # 'ס' + 13: 0, # 'ע' + 26: 0, # 'ף' + 18: 1, # 'פ' + 27: 0, # 'ץ' + 21: 0, # 'צ' + 17: 0, # 'ק' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # '”' + 58: 0, # '†' + 40: 2, # '…' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1255_HEBREW_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 69, # 'A' + 66: 91, # 'B' + 67: 79, # 'C' + 68: 80, # 'D' + 69: 92, # 'E' + 70: 89, # 'F' + 71: 97, # 'G' + 72: 90, # 'H' + 73: 68, # 'I' + 74: 111, # 'J' + 75: 112, # 'K' + 76: 82, # 'L' + 77: 73, # 'M' + 78: 95, # 'N' + 79: 85, # 'O' + 80: 78, # 'P' + 81: 121, # 'Q' + 82: 86, # 'R' + 83: 71, # 'S' + 84: 67, # 'T' + 85: 102, # 'U' + 86: 107, # 'V' + 87: 84, # 'W' + 88: 114, # 'X' + 89: 103, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 50, # 'a' + 98: 74, # 'b' + 99: 60, # 'c' + 100: 61, # 'd' + 101: 42, # 'e' + 102: 76, # 'f' + 103: 70, # 'g' + 104: 64, # 'h' + 105: 53, # 'i' + 106: 105, # 'j' + 107: 93, # 'k' + 108: 56, # 'l' + 109: 65, # 'm' + 110: 54, # 'n' + 111: 49, # 'o' + 112: 66, # 'p' + 113: 110, # 'q' + 114: 51, # 'r' + 115: 43, # 's' + 116: 44, # 't' + 117: 63, # 'u' + 118: 81, # 'v' + 119: 77, # 'w' + 120: 98, # 'x' + 121: 75, # 'y' + 122: 108, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 124, # '€' + 129: 202, # None + 130: 203, # '‚' + 131: 204, # 'ƒ' + 132: 205, # '„' + 133: 40, # '…' + 134: 58, # '†' + 135: 206, # '‡' + 136: 207, # 'ˆ' + 137: 208, # '‰' + 138: 209, # None + 139: 210, # '‹' + 140: 211, # None + 141: 212, # None + 142: 213, # None + 143: 214, # None + 144: 215, # None + 145: 83, # '‘' + 146: 52, # '’' + 147: 47, # '“' + 148: 46, # '”' + 149: 72, # '•' + 150: 32, # '–' + 151: 94, # '—' + 152: 216, # '˜' + 153: 113, # '™' + 154: 217, # None + 155: 109, # '›' + 156: 218, # None + 157: 219, # None + 158: 220, # None + 159: 221, # None + 160: 34, # '\xa0' + 161: 116, # '¡' + 162: 222, # '¢' + 163: 118, # '£' + 164: 100, # '₪' + 165: 223, # '¥' + 166: 224, # '¦' + 167: 117, # '§' + 168: 119, # '¨' + 169: 104, # '©' + 170: 125, # '×' + 171: 225, # '«' + 172: 226, # '¬' + 173: 87, # '\xad' + 174: 99, # '®' + 175: 227, # '¯' + 176: 106, # '°' + 177: 122, # '±' + 178: 123, # '²' + 179: 228, # '³' + 180: 55, # '´' + 181: 229, # 'µ' + 182: 230, # '¶' + 183: 101, # '·' + 184: 231, # '¸' + 185: 232, # '¹' + 186: 120, # '÷' + 187: 233, # '»' + 188: 48, # '¼' + 189: 39, # '½' + 190: 57, # '¾' + 191: 234, # '¿' + 192: 30, # 'ְ' + 193: 59, # 'ֱ' + 194: 41, # 'ֲ' + 195: 88, # 'ֳ' + 196: 33, # 'ִ' + 197: 37, # 'ֵ' + 198: 36, # 'ֶ' + 199: 31, # 'ַ' + 200: 29, # 'ָ' + 201: 35, # 'ֹ' + 202: 235, # None + 203: 62, # 'ֻ' + 204: 28, # 'ּ' + 205: 236, # 'ֽ' + 206: 126, # '־' + 207: 237, # 'ֿ' + 208: 238, # '׀' + 209: 38, # 'ׁ' + 210: 45, # 'ׂ' + 211: 239, # '׃' + 212: 240, # 'װ' + 213: 241, # 'ױ' + 214: 242, # 'ײ' + 215: 243, # '׳' + 216: 127, # '״' + 217: 244, # None + 218: 245, # None + 219: 246, # None + 220: 247, # None + 221: 248, # None + 222: 249, # None + 223: 250, # None + 224: 9, # 'א' + 225: 8, # 'ב' + 226: 20, # 'ג' + 227: 16, # 'ד' + 228: 3, # 'ה' + 229: 2, # 'ו' + 230: 24, # 'ז' + 231: 14, # 'ח' + 232: 22, # 'ט' + 233: 1, # 'י' + 234: 25, # 'ך' + 235: 15, # 'כ' + 236: 4, # 'ל' + 237: 11, # 'ם' + 238: 6, # 'מ' + 239: 23, # 'ן' + 240: 12, # 'נ' + 241: 19, # 'ס' + 242: 13, # 'ע' + 243: 26, # 'ף' + 244: 18, # 'פ' + 245: 27, # 'ץ' + 246: 21, # 'צ' + 247: 17, # 'ק' + 248: 7, # 'ר' + 249: 10, # 'ש' + 250: 5, # 'ת' + 251: 251, # None + 252: 252, # None + 253: 128, # '\u200e' + 254: 96, # '\u200f' + 255: 253, # None +} + +WINDOWS_1255_HEBREW_MODEL = SingleByteCharSetModel(charset_name='windows-1255', + language='Hebrew', + char_to_order_map=WINDOWS_1255_HEBREW_CHAR_TO_ORDER, + language_model=HEBREW_LANG_MODEL, + 
typical_positive_ratio=0.984004, + keep_ascii_letters=False, + alphabet='אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ') + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langhungarianmodel.py new file mode 100644 index 0000000..8260cb9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langhungarianmodel.py @@ -0,0 +1,4649 @@ +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +HUNGARIAN_LANG_MODEL = { + 28: { # 'A' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 2, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 2, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 1, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 40: { # 'B' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 3, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 54: { # 'C' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 3, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 45: { # 'D' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 
1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 32: { # 'E' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 50: { # 'F' + 28: 1, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 0, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 49: { # 'G' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' 
+ 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 38: { # 'H' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 1, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 39: { # 'I' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 53: { # 'J' + 28: 2, # 'A' + 40: 0, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 36: { # 'K' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 3, # 
'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 2, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 41: { # 'L' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 34: { # 'M' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 3, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 1, # 'ű' + }, + 35: { # 'N' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 47: { # 'O' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 35: 2, # 
'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 1, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 46: { # 'P' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 0, # 'ű' + }, + 43: { # 'R' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 2, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 2, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 33: { # 'S' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 3, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 
29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 37: { # 'T' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 57: { # 'U' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 48: { # 'V' + 28: 2, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 2, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 55: { # 'Y' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 
0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 52: { # 'Z' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Á' + 44: 1, # 'É' + 61: 1, # 'Í' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 2: { # 'a' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 18: { # 'b' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 26: { # 'c' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 
18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 17: { # 'd' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 1: { # 'e' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 2, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 27: { # 'f' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 3, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 12: { # 'g' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 
0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 20: { # 'h' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 9: { # 'i' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 1, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 1, # 'ű' + }, + 22: { # 'j' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' 
+ 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 7: { # 'k' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 3, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 6: { # 'l' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 3, # 'ő' + 56: 1, # 'ű' + }, + 13: { # 'm' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 2, # 'ű' + }, + 4: { # 'n' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 
13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 1, # 'x' + 16: 3, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 8: { # 'o' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 23: { # 'p' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 10: { # 'r' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 2, # 'ű' + }, + 5: { # 's' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 
33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 3: { # 't' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 3, # 'ő' + 56: 2, # 'ű' + }, + 21: { # 'u' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 19: { # 'v' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 1, # 'ű' + }, + 62: 
{ # 'x' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 16: { # 'y' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'ő' + 56: 2, # 'ű' + }, + 11: { # 'z' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'ő' + 56: 1, # 'ű' + }, + 51: { # 'Á' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' 
+ 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 1, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 44: { # 'É' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 61: { # 'Í' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 58: { # 'Ó' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 2, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 1, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 59: { # 'Ö' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 
'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 60: { # 'Ú' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 2, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 63: { # 'Ü' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 14: { # 'á' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 15: { # 'é' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 
'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 30: { # 'í' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 25: { # 'ó' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 24: { # 'ö' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 
30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 31: { # 'ú' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 3, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 29: { # 'ü' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 42: { # 'ő' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, + 56: { # 'ű' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 
23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Á' + 44: 0, # 'É' + 61: 0, # 'Í' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'ő' + 56: 0, # 'ű' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 72, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 161, # '€' + 129: 162, # None + 130: 163, # '‚' + 131: 164, # None + 132: 165, # '„' + 133: 166, # '…' + 134: 167, # '†' + 135: 168, # '‡' + 136: 169, # None + 137: 170, # '‰' + 138: 171, # 'Š' + 139: 172, # '‹' + 140: 173, # 'Ś' + 141: 174, # 'Ť' + 142: 175, # 'Ž' + 143: 176, # 'Ź' + 144: 177, # None + 145: 178, # '‘' + 146: 179, # '’' + 147: 180, # '“' + 148: 78, # '”' + 149: 181, # '•' + 150: 69, # '–' + 151: 182, # '—' + 152: 183, # None + 153: 184, # '™' + 154: 185, # 'š' + 155: 186, # '›' + 156: 187, # 'ś' + 157: 188, # 'ť' + 158: 189, # 'ž' + 159: 190, # 'ź' + 160: 191, # '\xa0' + 161: 192, # 'ˇ' + 162: 193, # '˘' + 163: 194, # 'Ł' + 164: 195, # '¤' + 165: 196, # 'Ą' + 166: 197, # '¦' + 167: 76, # '§' + 168: 198, # '¨' + 169: 199, # '©' + 170: 200, # 'Ş' + 171: 201, # '«' + 172: 202, # '¬' + 173: 203, # '\xad' + 174: 204, # '®' + 175: 205, # 'Ż' + 176: 81, # '°' + 177: 206, # '±' + 178: 207, # '˛' + 179: 208, # 'ł' + 180: 209, # '´' + 181: 210, # 'µ' + 182: 211, # '¶' + 183: 212, # '·' + 184: 213, # '¸' + 185: 214, # 'ą' + 186: 215, # 'ş' + 187: 216, # '»' + 188: 217, # 'Ľ' + 189: 218, # '˝' + 190: 219, # 'ľ' + 191: 220, # 'ż' + 192: 221, # 'Ŕ' + 193: 51, # 'Á' + 194: 83, # 'Â' + 195: 222, # 'Ă' + 196: 80, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'Č' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Ě' + 205: 61, # 'Í' + 206: 230, # 'Î' + 207: 231, # 'Ď' + 208: 232, # 'Đ' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Ő' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Ů' + 218: 60, # 'Ú' + 219: 70, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ý' + 222: 240, # 'Ţ' + 223: 241, # 'ß' + 224: 84, # 'ŕ' + 225: 14, # 'á' + 226: 75, # 'â' + 227: 242, # 'ă' + 228: 71, # 'ä' + 229: 82, # 'ĺ' + 230: 243, # 'ć' + 231: 73, # 'ç' + 232: 244, # 'č' + 233: 15, # 'é' + 234: 85, # 'ę' + 235: 79, # 'ë' + 236: 86, # 'ě' + 237: 30, # 'í' + 238: 77, # 'î' + 239: 87, # 'ď' + 240: 245, # 'đ' + 241: 246, # 'ń' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 74, # 'ô' + 245: 42, # 'ő' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'ř' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'ţ' + 255: 253, # '˙' +} + +WINDOWS_1250_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1250', + language='Hungarian', + char_to_order_map=WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + 
typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű') + +ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 71, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 159, # '\x80' + 129: 160, # '\x81' + 130: 161, # '\x82' + 131: 162, # '\x83' + 132: 163, # '\x84' + 133: 164, # '\x85' + 134: 165, # '\x86' + 135: 166, # '\x87' + 136: 167, # '\x88' + 137: 168, # '\x89' + 138: 169, # '\x8a' + 139: 170, # '\x8b' + 140: 171, # '\x8c' + 141: 172, # '\x8d' + 142: 173, # '\x8e' + 143: 174, # '\x8f' + 144: 175, # '\x90' + 145: 176, # '\x91' + 146: 177, # '\x92' + 147: 178, # '\x93' + 148: 179, # '\x94' + 149: 180, # '\x95' + 150: 181, # '\x96' + 151: 182, # '\x97' + 152: 183, # '\x98' + 153: 184, # '\x99' + 154: 185, # '\x9a' + 155: 186, # '\x9b' + 156: 187, # '\x9c' + 157: 188, # '\x9d' + 158: 189, # '\x9e' + 159: 190, # '\x9f' + 160: 191, # '\xa0' + 161: 192, # 'Ą' + 162: 193, # '˘' + 163: 194, # 'Ł' + 164: 195, # '¤' + 165: 196, # 'Ľ' + 166: 197, # 'Ś' + 167: 75, # '§' + 168: 198, # '¨' + 169: 199, # 'Š' + 170: 200, # 'Ş' + 171: 201, # 'Ť' + 172: 202, # 'Ź' + 173: 203, # '\xad' + 174: 204, # 'Ž' + 175: 205, # 'Ż' + 176: 79, # '°' + 177: 206, # 'ą' + 178: 207, # '˛' + 179: 208, # 'ł' + 180: 209, # '´' + 181: 210, # 'ľ' + 182: 211, # 'ś' + 183: 212, # 'ˇ' + 184: 213, # '¸' + 185: 214, # 'š' + 
186: 215, # 'ş' + 187: 216, # 'ť' + 188: 217, # 'ź' + 189: 218, # '˝' + 190: 219, # 'ž' + 191: 220, # 'ż' + 192: 221, # 'Ŕ' + 193: 51, # 'Á' + 194: 81, # 'Â' + 195: 222, # 'Ă' + 196: 78, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'Č' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Ě' + 205: 61, # 'Í' + 206: 230, # 'Î' + 207: 231, # 'Ď' + 208: 232, # 'Đ' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Ő' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Ů' + 218: 60, # 'Ú' + 219: 69, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ý' + 222: 240, # 'Ţ' + 223: 241, # 'ß' + 224: 82, # 'ŕ' + 225: 14, # 'á' + 226: 74, # 'â' + 227: 242, # 'ă' + 228: 70, # 'ä' + 229: 80, # 'ĺ' + 230: 243, # 'ć' + 231: 72, # 'ç' + 232: 244, # 'č' + 233: 15, # 'é' + 234: 83, # 'ę' + 235: 77, # 'ë' + 236: 84, # 'ě' + 237: 30, # 'í' + 238: 76, # 'î' + 239: 85, # 'ď' + 240: 245, # 'đ' + 241: 246, # 'ń' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 73, # 'ô' + 245: 42, # 'ő' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'ř' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'ţ' + 255: 253, # '˙' +} + +ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-2', + language='Hungarian', + char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű') + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langrussianmodel.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langrussianmodel.py new file mode 100644 index 0000000..c1a060d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langrussianmodel.py @@ -0,0 +1,5717 @@ +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +RUSSIAN_LANG_MODEL = { + 37: { # 'А' + 37: 0, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 44: { # 'Б' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 
'в' + 19: 0, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 2, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 33: { # 'В' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 0, # 'ю' + 16: 1, # 'я' + }, + 46: { # 'Г' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 41: { # 'Д' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 3, # 'ж' + 20: 1, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 48: { # 'Е' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' 
+ 45: 2, # 'Р' + 32: 2, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 1, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'р' + 7: 3, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 56: { # 'Ж' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 1, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 2, # 'ю' + 16: 0, # 'я' + }, + 51: { # 'З' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 1, # 'я' + }, + 42: { # 'И' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 2, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, 
# 'я' + }, + 60: { # 'Й' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 36: { # 'К' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 2, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 49: { # 'Л' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 0, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 2, # 'ю' + 16: 1, # 'я' + }, + 38: { # 'М' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 1, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, 
# 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 1, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 31: { # 'Н' + 37: 2, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 2, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 3, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 34: { # 'О' + 37: 0, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 2, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 2, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 35: { # 'П' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'р' + 7: 1, # 'с' + 6: 1, # 'т' + 14: 2, # 'у' + 39: 1, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 2, # 'я' + }, + 45: { # 'Р' + 37: 2, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 2, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 2, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, 
# 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 2, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 2, # 'я' + }, + 32: { # 'С' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 2, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 40: { # 'Т' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 2, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 52: { # 'У' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 1, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 1, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 0, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 53: { # 'Ф' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 
'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 1, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 55: { # 'Х' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 58: { # 'Ц' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 50: { # 'Ч' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 
25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 57: { # 'Ш' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 63: { # 'Щ' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 62: { # 'Ы' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 61: { # 'Ь' + 37: 0, # 'А' + 44: 1, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 0, # 'е' 
+ 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 47: { # 'Э' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 59: { # 'Ю' + 37: 1, # 'А' + 44: 1, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'р' + 7: 1, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 43: { # 'Я' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 1, # 'В' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Х' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 1, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'р' + 7: 1, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 3: { # 'а' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 
52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 21: { # 'б' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 0, # 'ф' + 26: 2, # 'х' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 10: { # 'в' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 3, # 'я' + }, + 19: { # 'г' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 13: { # 'д' + 37: 0, # 'А' + 44: 
0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 3, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 2: { # 'е' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 24: { # 'ж' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 1, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 0, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 20: { # 'з' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 
'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 3, # 'я' + }, + 4: { # 'и' + 37: 1, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 23: { # 'й' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 2, # 'ф' + 26: 1, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 2, # 'я' + }, + 11: { # 'к' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 3, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 1, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 8: { # 'л' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 
'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 3, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 1, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 1, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 12: { # 'м' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 2, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 3, # 'я' + }, + 5: { # 'н' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 2, # 'щ' + 54: 1, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 1: { # 'о' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 15: { # 'п' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 
38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 0, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 2, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 3, # 'я' + }, + 9: { # 'р' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 2, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 7: { # 'с' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 'ы' + 17: 3, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 6: { # 'т' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 2, # 'щ' + 54: 2, # 'ъ' + 18: 3, # 
'ы' + 17: 3, # 'ь' + 30: 2, # 'э' + 27: 2, # 'ю' + 16: 3, # 'я' + }, + 14: { # 'у' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 2, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 2, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 2, # 'э' + 27: 3, # 'ю' + 16: 2, # 'я' + }, + 39: { # 'ф' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 2, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 2, # 'ы' + 17: 1, # 'ь' + 30: 2, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 26: { # 'х' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 3, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 3, # 'р' + 7: 2, # 'с' + 6: 2, # 'т' + 14: 2, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ъ' + 18: 0, # 'ы' + 17: 1, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 28: { # 'ц' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, 
# 'й' + 11: 2, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'р' + 7: 0, # 'с' + 6: 1, # 'т' + 14: 3, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 1, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 3, # 'ы' + 17: 1, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 22: { # 'ч' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 3, # 'т' + 14: 3, # 'у' + 39: 1, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 3, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 25: { # 'ш' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 1, # 'с' + 6: 2, # 'т' + 14: 3, # 'у' + 39: 2, # 'ф' + 26: 1, # 'х' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 3, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 0, # 'я' + }, + 29: { # 'щ' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 2, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 2, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 2, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 0, # 'я' + }, + 54: { # 'ъ' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 
'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'р' + 7: 0, # 'с' + 6: 0, # 'т' + 14: 0, # 'у' + 39: 0, # 'ф' + 26: 0, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 1, # 'ю' + 16: 2, # 'я' + }, + 18: { # 'ы' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 1, # 'о' + 15: 3, # 'п' + 9: 3, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 0, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 0, # 'ю' + 16: 2, # 'я' + }, + 17: { # 'ь' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 0, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 1, # 'р' + 7: 3, # 'с' + 6: 2, # 'т' + 14: 0, # 'у' + 39: 2, # 'ф' + 26: 1, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 3, # 'ю' + 16: 3, # 'я' + }, + 30: { # 'э' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 2, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 2, # 'ф' + 26: 1, # 'х' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 1, # 'ю' + 16: 1, # 'я' + }, + 27: { # 'ю' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' 
+ 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 1, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 0, # 'у' + 39: 1, # 'ф' + 26: 2, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 1, # 'э' + 27: 2, # 'ю' + 16: 1, # 'я' + }, + 16: { # 'я' + 37: 0, # 'А' + 44: 0, # 'Б' + 33: 0, # 'В' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Н' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Х' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 2, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'р' + 7: 3, # 'с' + 6: 3, # 'т' + 14: 1, # 'у' + 39: 1, # 'ф' + 26: 3, # 'х' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ъ' + 18: 0, # 'ы' + 17: 0, # 'ь' + 30: 0, # 'э' + 27: 2, # 'ю' + 16: 2, # 'я' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +IBM866_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 37, # 'А' + 129: 44, # 'Б' + 130: 33, # 'В' + 131: 46, # 'Г' + 132: 41, # 'Д' + 133: 48, # 'Е' + 134: 56, # 'Ж' + 135: 51, # 'З' + 136: 42, # 'И' + 137: 60, # 'Й' + 138: 36, # 'К' + 139: 49, # 'Л' + 140: 38, # 'М' + 141: 31, # 'Н' + 142: 34, # 'О' + 143: 35, # 'П' + 144: 45, # 'Р' + 145: 32, # 'С' + 146: 40, # 'Т' + 147: 52, # 'У' + 148: 53, # 'Ф' + 149: 55, # 'Х' + 150: 58, # 'Ц' + 151: 50, # 'Ч' + 152: 57, # 'Ш' + 153: 63, # 'Щ' + 154: 70, # 'Ъ' + 155: 62, # 'Ы' + 156: 61, # 'Ь' + 157: 47, # 'Э' + 158: 59, # 'Ю' + 159: 43, # 'Я' + 160: 3, # 'а' + 161: 21, # 'б' + 162: 10, # 'в' + 163: 19, # 'г' + 164: 13, # 'д' + 165: 2, # 'е' + 166: 24, # 'ж' + 167: 20, # 'з' + 168: 4, # 'и' + 169: 23, # 'й' + 170: 11, # 'к' + 171: 8, # 'л' + 172: 12, # 'м' + 173: 5, # 'н' + 174: 1, # 'о' + 175: 15, # 'п' + 176: 191, # '░' + 177: 192, # '▒' + 178: 193, # '▓' + 179: 194, # '│' + 180: 195, # '┤' + 181: 196, # '╡' + 182: 197, # '╢' + 183: 198, # '╖' + 184: 199, # '╕' + 185: 200, # '╣' + 186: 201, # '║' + 187: 202, # '╗' + 188: 203, # '╝' + 189: 204, # '╜' + 190: 205, # '╛' + 191: 206, # '┐' + 192: 207, # '└' + 193: 208, # '┴' + 194: 209, # '┬' + 195: 210, # '├' + 196: 211, # '─' + 197: 212, # '┼' + 198: 213, # '╞' + 199: 214, # '╟' + 200: 215, # '╚' + 201: 216, # '╔' + 202: 217, # '╩' + 203: 218, # '╦' + 204: 219, # '╠' + 205: 220, # '═' + 206: 221, # '╬' + 207: 222, # '╧' + 208: 223, # '╨' + 209: 224, # '╤' + 210: 225, # '╥' + 211: 226, # '╙' + 212: 227, # '╘' + 213: 228, # '╒' + 214: 229, # '╓' + 215: 230, # '╫' + 216: 231, # '╪' + 217: 232, # '┘' + 218: 233, # '┌' + 219: 234, # '█' + 220: 235, # '▄' + 221: 236, # '▌' + 222: 237, # '▐' + 223: 238, # '▀' + 224: 9, # 'р' + 225: 7, # 'с' + 226: 6, # 'т' + 227: 14, # 'у' + 228: 39, # 'ф' + 229: 26, # 'х' + 230: 28, # 'ц' + 231: 22, # 'ч' + 232: 25, # 'ш' + 233: 29, # 'щ' + 234: 54, # 'ъ' + 235: 18, # 'ы' + 236: 17, # 'ь' + 237: 30, # 'э' + 238: 27, # 'ю' + 239: 16, # 'я' + 240: 239, # 'Ё' + 241: 68, # 'ё' + 242: 240, # 'Є' + 243: 241, # 'є' + 244: 242, # 'Ї' + 245: 243, # 'ї' + 246: 244, # 'Ў' + 247: 245, # 'ў' + 248: 246, # '°' + 249: 247, # '∙' + 250: 248, # '·' + 251: 249, # '√' + 252: 250, # '№' + 253: 251, # '¤' + 254: 252, # '■' + 255: 255, # '\xa0' +} + +IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM866', + language='Russian', + char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + 
typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # 'Ђ' + 129: 192, # 'Ѓ' + 130: 193, # '‚' + 131: 194, # 'ѓ' + 132: 195, # '„' + 133: 196, # '…' + 134: 197, # '†' + 135: 198, # '‡' + 136: 199, # '€' + 137: 200, # '‰' + 138: 201, # 'Љ' + 139: 202, # '‹' + 140: 203, # 'Њ' + 141: 204, # 'Ќ' + 142: 205, # 'Ћ' + 143: 206, # 'Џ' + 144: 207, # 'ђ' + 145: 208, # '‘' + 146: 209, # '’' + 147: 210, # '“' + 148: 211, # '”' + 149: 212, # '•' + 150: 213, # '–' + 151: 214, # '—' + 152: 215, # None + 153: 216, # '™' + 154: 217, # 'љ' + 155: 218, # '›' + 156: 219, # 'њ' + 157: 220, # 'ќ' + 158: 221, # 'ћ' + 159: 222, # 'џ' + 160: 223, # '\xa0' + 161: 224, # 'Ў' + 162: 225, # 'ў' + 163: 226, # 'Ј' + 164: 227, # '¤' + 165: 228, # 'Ґ' + 166: 229, # '¦' + 167: 230, # '§' + 168: 231, # 'Ё' + 169: 232, # '©' + 170: 233, # 'Є' + 171: 234, # '«' + 172: 235, # '¬' + 173: 236, # '\xad' + 174: 237, # '®' + 175: 238, # 'Ї' + 176: 239, # '°' + 177: 240, # '±' + 178: 241, # 'І' + 179: 242, # 'і' + 180: 243, # 'ґ' + 181: 244, # 'µ' + 182: 245, # '¶' + 183: 246, # '·' + 184: 68, # 'ё' + 185: 247, # '№' + 186: 248, # 'є' + 187: 249, # '»' + 188: 
250, # 'ј' + 189: 251, # 'Ѕ' + 190: 252, # 'ѕ' + 191: 253, # 'ї' + 192: 37, # 'А' + 193: 44, # 'Б' + 194: 33, # 'В' + 195: 46, # 'Г' + 196: 41, # 'Д' + 197: 48, # 'Е' + 198: 56, # 'Ж' + 199: 51, # 'З' + 200: 42, # 'И' + 201: 60, # 'Й' + 202: 36, # 'К' + 203: 49, # 'Л' + 204: 38, # 'М' + 205: 31, # 'Н' + 206: 34, # 'О' + 207: 35, # 'П' + 208: 45, # 'Р' + 209: 32, # 'С' + 210: 40, # 'Т' + 211: 52, # 'У' + 212: 53, # 'Ф' + 213: 55, # 'Х' + 214: 58, # 'Ц' + 215: 50, # 'Ч' + 216: 57, # 'Ш' + 217: 63, # 'Щ' + 218: 70, # 'Ъ' + 219: 62, # 'Ы' + 220: 61, # 'Ь' + 221: 47, # 'Э' + 222: 59, # 'Ю' + 223: 43, # 'Я' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'р' + 241: 7, # 'с' + 242: 6, # 'т' + 243: 14, # 'у' + 244: 39, # 'ф' + 245: 26, # 'х' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ъ' + 251: 18, # 'ы' + 252: 17, # 'ь' + 253: 30, # 'э' + 254: 27, # 'ю' + 255: 16, # 'я' +} + +WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', + language='Russian', + char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +IBM855_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # 'ђ' + 129: 192, # 'Ђ' + 130: 193, # 'ѓ' + 131: 194, # 'Ѓ' + 132: 68, # 'ё' + 133: 195, # 'Ё' + 134: 196, # 'є' + 135: 197, # 'Є' + 136: 198, # 'ѕ' + 137: 199, # 'Ѕ' + 138: 200, # 'і' + 139: 201, # 'І' + 140: 202, # 'ї' + 141: 203, # 'Ї' + 142: 204, # 'ј' + 143: 205, # 'Ј' + 144: 206, # 'љ' + 145: 207, # 'Љ' + 146: 208, # 'њ' + 147: 209, # 'Њ' + 148: 210, # 'ћ' + 149: 211, # 'Ћ' + 150: 212, # 'ќ' + 151: 213, # 'Ќ' + 152: 214, # 'ў' + 153: 215, # 'Ў' + 154: 216, # 'џ' + 155: 217, # 'Џ' + 156: 27, # 'ю' + 157: 59, # 'Ю' + 158: 54, # 'ъ' + 159: 70, # 'Ъ' + 160: 3, # 'а' + 161: 37, # 'А' + 162: 21, # 'б' + 163: 44, # 'Б' + 164: 28, # 'ц' + 165: 58, # 'Ц' + 166: 13, # 'д' + 167: 41, # 'Д' + 168: 2, # 'е' + 169: 48, # 'Е' + 170: 39, # 'ф' + 171: 53, # 'Ф' + 172: 19, # 'г' + 173: 46, # 'Г' + 174: 218, # '«' + 175: 219, # '»' + 176: 220, # '░' + 177: 221, # '▒' + 178: 222, # '▓' + 179: 223, # '│' + 180: 224, # '┤' + 181: 26, # 'х' + 182: 55, # 'Х' + 183: 4, # 'и' + 184: 42, # 'И' + 185: 225, # '╣' + 186: 226, # '║' + 187: 227, # '╗' + 188: 228, # '╝' + 189: 23, # 'й' + 190: 60, # 'Й' + 191: 229, # '┐' + 192: 230, # '└' + 193: 231, # '┴' + 194: 232, # '┬' + 195: 233, # '├' + 196: 234, # '─' + 197: 235, # '┼' + 198: 11, # 'к' + 199: 36, # 'К' + 200: 236, # '╚' + 201: 237, # '╔' + 202: 238, # '╩' + 203: 239, # '╦' + 204: 240, # '╠' + 205: 241, # '═' + 206: 242, # '╬' + 207: 243, # '¤' + 208: 8, # 'л' + 209: 49, # 'Л' + 210: 12, # 'м' + 211: 38, # 'М' + 212: 5, # 'н' + 213: 31, # 'Н' + 214: 1, # 'о' + 215: 34, # 'О' + 216: 15, # 'п' + 217: 244, # '┘' + 218: 245, # '┌' + 219: 246, # '█' + 220: 247, # '▄' + 221: 35, # 'П' + 222: 16, # 'я' + 223: 248, # '▀' + 224: 43, # 'Я' + 225: 9, # 'р' + 226: 45, # 'Р' + 227: 7, # 'с' + 228: 32, # 'С' + 229: 6, # 'т' + 230: 40, # 'Т' + 231: 14, # 'у' + 232: 52, # 'У' + 233: 24, # 'ж' + 234: 56, # 'Ж' + 235: 10, # 'в' + 236: 33, # 'В' + 237: 17, # 'ь' + 238: 61, # 'Ь' + 239: 249, # '№' + 240: 250, # '\xad' + 241: 18, # 'ы' + 242: 62, # 'Ы' + 243: 20, # 'з' + 244: 51, # 'З' + 245: 25, # 'ш' + 246: 57, # 'Ш' + 247: 30, # 'э' + 248: 47, # 'Э' + 249: 29, # 'щ' + 250: 63, # 'Щ' + 251: 22, # 'ч' + 252: 50, # 'Ч' + 253: 251, # '§' + 254: 252, # '■' + 255: 255, # '\xa0' +} + +IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM855', + language='Russian', + char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + 
typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +KOI8_R_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '─' + 129: 192, # '│' + 130: 193, # '┌' + 131: 194, # '┐' + 132: 195, # '└' + 133: 196, # '┘' + 134: 197, # '├' + 135: 198, # '┤' + 136: 199, # '┬' + 137: 200, # '┴' + 138: 201, # '┼' + 139: 202, # '▀' + 140: 203, # '▄' + 141: 204, # '█' + 142: 205, # '▌' + 143: 206, # '▐' + 144: 207, # '░' + 145: 208, # '▒' + 146: 209, # '▓' + 147: 210, # '⌠' + 148: 211, # '■' + 149: 212, # '∙' + 150: 213, # '√' + 151: 214, # '≈' + 152: 215, # '≤' + 153: 216, # '≥' + 154: 217, # '\xa0' + 155: 218, # '⌡' + 156: 219, # '°' + 157: 220, # '²' + 158: 221, # '·' + 159: 222, # '÷' + 160: 223, # '═' + 161: 224, # '║' + 162: 225, # '╒' + 163: 68, # 'ё' + 164: 226, # '╓' + 165: 227, # '╔' + 166: 228, # '╕' + 167: 229, # '╖' + 168: 230, # '╗' + 169: 231, # '╘' + 170: 232, # '╙' + 171: 233, # '╚' + 172: 234, # '╛' + 173: 235, # '╜' + 174: 236, # '╝' + 175: 237, # '╞' + 176: 238, # '╟' + 177: 239, # '╠' + 178: 240, # '╡' + 179: 241, # 'Ё' + 180: 242, # '╢' + 181: 243, # '╣' + 182: 244, # '╤' + 183: 245, # '╥' + 184: 246, # '╦' + 185: 247, # '╧' + 186: 248, # '╨' + 187: 249, # '╩' + 188: 250, # '╪' + 
189: 251, # '╫' + 190: 252, # '╬' + 191: 253, # '©' + 192: 27, # 'ю' + 193: 3, # 'а' + 194: 21, # 'б' + 195: 28, # 'ц' + 196: 13, # 'д' + 197: 2, # 'е' + 198: 39, # 'ф' + 199: 19, # 'г' + 200: 26, # 'х' + 201: 4, # 'и' + 202: 23, # 'й' + 203: 11, # 'к' + 204: 8, # 'л' + 205: 12, # 'м' + 206: 5, # 'н' + 207: 1, # 'о' + 208: 15, # 'п' + 209: 16, # 'я' + 210: 9, # 'р' + 211: 7, # 'с' + 212: 6, # 'т' + 213: 14, # 'у' + 214: 24, # 'ж' + 215: 10, # 'в' + 216: 17, # 'ь' + 217: 18, # 'ы' + 218: 20, # 'з' + 219: 25, # 'ш' + 220: 30, # 'э' + 221: 29, # 'щ' + 222: 22, # 'ч' + 223: 54, # 'ъ' + 224: 59, # 'Ю' + 225: 37, # 'А' + 226: 44, # 'Б' + 227: 58, # 'Ц' + 228: 41, # 'Д' + 229: 48, # 'Е' + 230: 53, # 'Ф' + 231: 46, # 'Г' + 232: 55, # 'Х' + 233: 42, # 'И' + 234: 60, # 'Й' + 235: 36, # 'К' + 236: 49, # 'Л' + 237: 38, # 'М' + 238: 31, # 'Н' + 239: 34, # 'О' + 240: 35, # 'П' + 241: 43, # 'Я' + 242: 45, # 'Р' + 243: 32, # 'С' + 244: 40, # 'Т' + 245: 52, # 'У' + 246: 56, # 'Ж' + 247: 33, # 'В' + 248: 61, # 'Ь' + 249: 62, # 'Ы' + 250: 51, # 'З' + 251: 57, # 'Ш' + 252: 47, # 'Э' + 253: 63, # 'Щ' + 254: 50, # 'Ч' + 255: 70, # 'Ъ' +} + +KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='KOI8-R', + language='Russian', + char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 37, # 'А' + 129: 44, # 'Б' + 130: 33, # 'В' + 131: 46, # 'Г' + 132: 41, # 'Д' + 133: 48, # 'Е' + 134: 56, # 'Ж' + 135: 51, # 'З' + 136: 42, # 'И' + 137: 60, # 'Й' + 138: 36, # 'К' + 139: 49, # 'Л' + 140: 38, # 'М' + 141: 31, # 'Н' + 142: 34, # 'О' + 143: 35, # 'П' + 144: 45, # 'Р' + 145: 32, # 'С' + 146: 40, # 'Т' + 147: 52, # 'У' + 148: 53, # 'Ф' + 149: 55, # 'Х' + 150: 58, # 'Ц' + 151: 50, # 'Ч' + 152: 57, # 'Ш' + 153: 63, # 'Щ' + 154: 70, # 'Ъ' + 155: 62, # 'Ы' + 156: 61, # 'Ь' + 157: 47, # 'Э' + 158: 59, # 'Ю' + 159: 43, # 'Я' + 160: 191, # '†' + 161: 192, # '°' + 162: 193, # 'Ґ' + 163: 194, # '£' + 164: 195, # '§' + 165: 196, # '•' + 166: 197, # '¶' + 167: 198, # 'І' + 168: 199, # '®' + 169: 200, # '©' + 170: 201, # '™' + 171: 202, # 'Ђ' + 172: 203, # 'ђ' + 173: 204, # '≠' + 174: 205, # 'Ѓ' + 175: 206, # 'ѓ' + 176: 207, # '∞' + 177: 208, # '±' + 178: 209, # '≤' + 179: 210, # '≥' + 180: 211, # 'і' + 181: 212, # 'µ' + 182: 213, # 'ґ' + 183: 214, # 'Ј' + 184: 215, # 'Є' + 185: 216, # 'є' + 186: 217, # 'Ї' + 187: 218, # 'ї' + 188: 219, # 'Љ' + 189: 220, # 'љ' + 190: 221, # 'Њ' + 191: 222, # 'њ' + 192: 223, # 'ј' + 193: 224, # 'Ѕ' + 194: 225, # '¬' + 195: 226, # '√' + 196: 227, # 'ƒ' + 197: 228, # '≈' + 198: 229, # '∆' + 199: 230, # '«' + 200: 231, # '»' + 201: 232, # '…' + 202: 233, # '\xa0' + 203: 234, # 'Ћ' + 204: 235, # 'ћ' + 205: 236, # 'Ќ' + 206: 237, # 'ќ' + 207: 238, # 'ѕ' + 208: 239, # '–' + 209: 240, # '—' + 210: 241, # '“' + 211: 242, # '”' + 212: 243, # '‘' + 213: 244, # '’' + 214: 245, # '÷' + 215: 246, # '„' + 216: 247, # 'Ў' + 217: 248, # 'ў' + 218: 249, # 'Џ' + 219: 250, # 'џ' + 220: 251, # '№' + 221: 252, # 'Ё' + 222: 68, # 'ё' + 223: 16, # 'я' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'р' + 241: 7, # 'с' + 242: 6, # 'т' + 243: 14, # 'у' + 244: 39, # 'ф' + 245: 26, # 'х' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ъ' + 251: 18, # 'ы' + 252: 17, # 'ь' + 253: 30, # 'э' + 254: 27, # 'ю' + 255: 255, # '€' +} + +MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='MacCyrillic', + language='Russian', + char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + 
typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + +ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '\x80' + 129: 192, # '\x81' + 130: 193, # '\x82' + 131: 194, # '\x83' + 132: 195, # '\x84' + 133: 196, # '\x85' + 134: 197, # '\x86' + 135: 198, # '\x87' + 136: 199, # '\x88' + 137: 200, # '\x89' + 138: 201, # '\x8a' + 139: 202, # '\x8b' + 140: 203, # '\x8c' + 141: 204, # '\x8d' + 142: 205, # '\x8e' + 143: 206, # '\x8f' + 144: 207, # '\x90' + 145: 208, # '\x91' + 146: 209, # '\x92' + 147: 210, # '\x93' + 148: 211, # '\x94' + 149: 212, # '\x95' + 150: 213, # '\x96' + 151: 214, # '\x97' + 152: 215, # '\x98' + 153: 216, # '\x99' + 154: 217, # '\x9a' + 155: 218, # '\x9b' + 156: 219, # '\x9c' + 157: 220, # '\x9d' + 158: 221, # '\x9e' + 159: 222, # '\x9f' + 160: 223, # '\xa0' + 161: 224, # 'Ё' + 162: 225, # 'Ђ' + 163: 226, # 'Ѓ' + 164: 227, # 'Є' + 165: 228, # 'Ѕ' + 166: 229, # 'І' + 167: 230, # 'Ї' + 168: 231, # 'Ј' + 169: 232, # 'Љ' + 170: 233, # 'Њ' + 171: 234, # 'Ћ' + 172: 235, # 'Ќ' + 173: 236, # '\xad' + 174: 237, # 'Ў' + 175: 238, # 'Џ' + 176: 37, # 'А' + 177: 44, # 'Б' + 178: 33, # 'В' + 179: 46, # 'Г' + 180: 41, # 'Д' + 181: 48, # 'Е' + 182: 56, # 'Ж' + 183: 51, # 
'З' + 184: 42, # 'И' + 185: 60, # 'Й' + 186: 36, # 'К' + 187: 49, # 'Л' + 188: 38, # 'М' + 189: 31, # 'Н' + 190: 34, # 'О' + 191: 35, # 'П' + 192: 45, # 'Р' + 193: 32, # 'С' + 194: 40, # 'Т' + 195: 52, # 'У' + 196: 53, # 'Ф' + 197: 55, # 'Х' + 198: 58, # 'Ц' + 199: 50, # 'Ч' + 200: 57, # 'Ш' + 201: 63, # 'Щ' + 202: 70, # 'Ъ' + 203: 62, # 'Ы' + 204: 61, # 'Ь' + 205: 47, # 'Э' + 206: 59, # 'Ю' + 207: 43, # 'Я' + 208: 3, # 'а' + 209: 21, # 'б' + 210: 10, # 'в' + 211: 19, # 'г' + 212: 13, # 'д' + 213: 2, # 'е' + 214: 24, # 'ж' + 215: 20, # 'з' + 216: 4, # 'и' + 217: 23, # 'й' + 218: 11, # 'к' + 219: 8, # 'л' + 220: 12, # 'м' + 221: 5, # 'н' + 222: 1, # 'о' + 223: 15, # 'п' + 224: 9, # 'р' + 225: 7, # 'с' + 226: 6, # 'т' + 227: 14, # 'у' + 228: 39, # 'ф' + 229: 26, # 'х' + 230: 28, # 'ц' + 231: 22, # 'ч' + 232: 25, # 'ш' + 233: 29, # 'щ' + 234: 54, # 'ъ' + 235: 18, # 'ы' + 236: 17, # 'ь' + 237: 30, # 'э' + 238: 27, # 'ю' + 239: 16, # 'я' + 240: 239, # '№' + 241: 68, # 'ё' + 242: 240, # 'ђ' + 243: 241, # 'ѓ' + 244: 242, # 'є' + 245: 243, # 'ѕ' + 246: 244, # 'і' + 247: 245, # 'ї' + 248: 246, # 'ј' + 249: 247, # 'љ' + 250: 248, # 'њ' + 251: 249, # 'ћ' + 252: 250, # 'ќ' + 253: 251, # '§' + 254: 252, # 'ў' + 255: 255, # 'џ' +} + +ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', + language='Russian', + char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё') + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langthaimodel.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langthaimodel.py new file mode 100644 index 0000000..24e0df2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langthaimodel.py @@ -0,0 +1,4382 @@ +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +THAI_LANG_MODEL = { + 5: { # 'ก' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 3, # 'ฎ' + 57: 2, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 1, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 30: { # 'ข' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 2, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 
'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 24: { # 'ค' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 3, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 8: { # 'ง' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 1, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 2, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 3, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 26: { # 'จ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 52: { # 'ฉ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 
'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 34: { # 'ช' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 1, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 51: { # 'ซ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 47: { # 'ญ' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + 
}, + 58: { # 'ฎ' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 57: { # 'ฏ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 49: { # 'ฐ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 53: { # 'ฑ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 
13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 55: { # 'ฒ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 43: { # 'ณ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 3, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 3, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 20: { # 'ด' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 2, # '็' + 6: 1, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 19: { # 'ต' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 2, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 
45: 2, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 44: { # 'ถ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 14: { # 'ท' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 3, # 'ศ' + 46: 1, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 48: { # 'ธ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 2, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 3: { # 'น' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 
0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 1, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 3, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 3, # 'โ' + 29: 3, # 'ใ' + 33: 3, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 17: { # 'บ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 2, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 25: { # 'ป' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 1, # 'ฎ' + 57: 3, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 1, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 2, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 39: { # 'ผ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 0, # 'ุ' + 35: 3, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 
'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 1, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 62: { # 'ฝ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 1, # 'ี' + 40: 2, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 1, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 31: { # 'พ' + 5: 1, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 3, # 'ื' + 32: 1, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 0, # '่' + 7: 1, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 54: { # 'ฟ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 45: { # 'ภ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 
'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 2, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 9: { # 'ม' + 5: 2, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 2, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 2, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 16: { # 'ย' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 2, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 2: { # 'ร' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 3, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 3, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 3, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 2, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 3, # 'เ' + 28: 3, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 61: { # 'ฤ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' 
+ 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 2, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 15: { # 'ล' + 5: 2, # 'ก' + 30: 3, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 2, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 12: { # 'ว' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 42: { # 'ศ' + 5: 1, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 2, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 3, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 2, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'แ' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 46: { # 'ษ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 
24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 2, # 'ฎ' + 57: 1, # 'ฏ' + 49: 2, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 18: { # 'ส' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 3, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 2, # 'ภ' + 9: 3, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 0, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 1, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 21: { # 'ห' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 4: { # 'อ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, 
# 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 63: { # 'ฯ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 22: { # 'ะ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 1, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 10: { # 'ั' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 2, # 'ฐ' + 53: 0, # 'ฑ' + 55: 3, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 1: { # 'า' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 1, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 2, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 
'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 36: { # 'ำ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 23: { # 'ิ' + 5: 3, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 2, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 3, # 'ศ' + 46: 2, # 'ษ' + 18: 2, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 13: { # 'ี' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 40: { # 'ึ' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 3, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 
'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 27: { # 'ื' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 32: { # 'ุ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 3, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 1, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 1, # 'ศ' + 46: 2, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 35: { # 'ู' + 5: 3, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'แ' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 
38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 11: { # 'เ' + 5: 3, # 'ก' + 30: 3, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 3, # 'ฉ' + 34: 3, # 'ช' + 51: 2, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 3, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 3, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 28: { # 'แ' + 5: 3, # 'ก' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 3, # 'ผ' + 62: 0, # 'ฝ' + 31: 2, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 41: { # 'โ' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 29: { # 'ใ' + 5: 2, # 'ก' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 
10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 33: { # 'ไ' + 5: 1, # 'ก' + 30: 2, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 50: { # 'ๆ' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 37: { # '็' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 6: { # '่' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 1, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 
39: 2, # 'ผ' + 62: 1, # 'ฝ' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 7: { # '้' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 38: { # '์' + 5: 2, # 'ก' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'แ' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 56: { # '๑' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 2, # '๑' + 59: 1, # '๒' + 60: 1, # '๕' + }, + 59: { # '๒' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 
0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 1, # '๑' + 59: 1, # '๒' + 60: 3, # '๕' + }, + 60: { # '๕' + 5: 0, # 'ก' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'ญ' + 58: 0, # 'ฎ' + 57: 0, # 'ฏ' + 49: 0, # 'ฐ' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'ฝ' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'แ' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 2, # '๑' + 59: 1, # '๒' + 60: 0, # '๕' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +TIS_620_THAI_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 182, # 'A' + 66: 106, # 'B' + 67: 107, # 'C' + 68: 100, # 'D' + 69: 183, # 'E' + 70: 184, # 'F' + 71: 185, # 'G' + 72: 101, # 'H' + 73: 94, # 'I' + 74: 186, # 'J' + 75: 187, # 'K' + 76: 108, # 'L' + 77: 109, # 'M' + 78: 110, # 'N' + 79: 111, # 'O' + 80: 188, # 'P' + 81: 189, # 'Q' + 82: 190, # 'R' + 83: 89, # 'S' + 84: 95, # 'T' + 85: 112, # 'U' + 86: 113, # 'V' + 87: 191, # 'W' + 88: 192, # 'X' + 89: 193, # 'Y' + 90: 194, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 64, # 'a' + 98: 72, # 'b' + 99: 73, # 'c' + 100: 114, # 'd' + 101: 74, # 'e' + 102: 115, # 'f' + 103: 116, # 'g' + 104: 102, # 'h' + 105: 81, # 'i' + 106: 201, # 'j' + 107: 117, # 'k' + 108: 90, # 'l' + 109: 103, # 'm' + 110: 78, # 'n' + 111: 82, # 'o' + 112: 96, # 'p' + 113: 202, # 'q' + 114: 91, # 'r' + 115: 79, # 's' + 116: 84, # 't' + 117: 104, # 'u' + 118: 105, # 'v' + 119: 97, # 'w' + 120: 98, # 'x' + 121: 92, # 'y' + 122: 203, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 209, # '\x80' + 129: 210, # '\x81' + 130: 211, # '\x82' + 131: 212, # '\x83' + 132: 213, # '\x84' + 133: 88, # '\x85' + 134: 214, # '\x86' + 135: 215, # '\x87' + 136: 216, # '\x88' + 137: 217, # '\x89' + 138: 218, # '\x8a' + 139: 219, # '\x8b' + 140: 220, # '\x8c' + 141: 118, # '\x8d' + 142: 221, # '\x8e' + 143: 222, # '\x8f' + 144: 223, # '\x90' + 145: 224, # '\x91' + 146: 99, # '\x92' + 147: 85, # '\x93' + 148: 83, # '\x94' + 149: 225, # '\x95' + 150: 226, # '\x96' + 151: 227, # '\x97' + 152: 228, # '\x98' + 153: 229, # '\x99' + 154: 230, # '\x9a' + 155: 231, # '\x9b' + 156: 232, # '\x9c' + 157: 233, # '\x9d' + 158: 234, # '\x9e' + 159: 235, # '\x9f' + 160: 236, # None + 161: 5, # 'ก' + 162: 30, # 'ข' + 163: 237, # 'ฃ' + 164: 24, # 'ค' + 165: 238, # 'ฅ' + 166: 75, # 'ฆ' + 167: 8, # 'ง' + 168: 26, # 'จ' + 169: 52, # 'ฉ' + 170: 34, # 'ช' + 171: 51, # 'ซ' + 172: 119, # 'ฌ' + 173: 47, # 'ญ' + 174: 58, # 'ฎ' + 175: 57, # 'ฏ' + 176: 49, # 'ฐ' + 177: 53, # 'ฑ' + 178: 55, # 'ฒ' + 179: 43, # 'ณ' + 180: 20, # 'ด' + 181: 19, # 'ต' + 182: 44, # 'ถ' + 183: 14, # 'ท' + 184: 48, # 'ธ' + 185: 3, # 'น' + 186: 17, # 'บ' + 187: 25, # 'ป' + 188: 39, # 'ผ' + 189: 62, # 'ฝ' + 190: 31, # 'พ' + 191: 54, # 'ฟ' + 192: 45, # 'ภ' + 193: 9, # 'ม' + 194: 16, # 'ย' + 195: 2, # 'ร' + 196: 61, # 'ฤ' + 197: 15, # 'ล' + 198: 239, # 'ฦ' + 199: 12, # 'ว' + 200: 42, # 'ศ' + 201: 46, # 'ษ' + 202: 18, # 'ส' + 203: 21, # 'ห' + 204: 76, # 'ฬ' + 205: 4, # 'อ' + 206: 66, # 'ฮ' + 207: 63, # 'ฯ' + 208: 22, # 'ะ' + 209: 10, # 'ั' + 210: 1, # 'า' + 211: 36, # 'ำ' + 212: 23, # 'ิ' + 213: 13, # 'ี' + 214: 40, # 'ึ' + 215: 27, # 'ื' + 216: 32, # 'ุ' + 217: 35, # 'ู' + 218: 86, # 'ฺ' + 219: 240, # None + 220: 241, # None + 221: 242, # None + 222: 243, # None + 223: 244, # '฿' + 224: 11, # 'เ' + 225: 28, # 'แ' + 226: 41, # 'โ' + 227: 29, # 'ใ' + 228: 33, # 'ไ' + 229: 245, # 'ๅ' + 230: 50, # 'ๆ' + 231: 37, # '็' + 232: 6, # '่' + 233: 7, # '้' + 234: 67, # '๊' + 235: 77, # '๋' + 236: 38, # '์' + 237: 93, # 'ํ' + 238: 246, # '๎' + 239: 247, # '๏' + 240: 68, # '๐' + 241: 56, # '๑' + 242: 59, # '๒' + 243: 65, # '๓' + 244: 69, # '๔' + 245: 60, # '๕' + 246: 70, # '๖' + 247: 80, # '๗' + 248: 71, # '๘' + 249: 87, # '๙' + 250: 248, # '๚' + 251: 249, # '๛' + 252: 250, # None + 253: 251, # None + 254: 252, # None + 255: 253, # None +} + +TIS_620_THAI_MODEL = SingleByteCharSetModel(charset_name='TIS-620', + language='Thai', + 
char_to_order_map=TIS_620_THAI_CHAR_TO_ORDER, + language_model=THAI_LANG_MODEL, + typical_positive_ratio=0.926386, + keep_ascii_letters=False, + alphabet='กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛') + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langturkishmodel.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langturkishmodel.py new file mode 100644 index 0000000..354b600 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/langturkishmodel.py @@ -0,0 +1,4382 @@ +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +TURKISH_LANG_MODEL = { + 23: { # 'A' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 37: { # 'B' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 47: { # 'C' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' 
+ 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 39: { # 'D' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 0, # 'ş' + }, + 29: { # 'E' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 52: { # 'F' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 2, # 'ş' + }, + 36: { # 'G' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 0, # 'r' + 
8: 1, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 1, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 45: { # 'H' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 2, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 2, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 53: { # 'I' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 60: { # 'J' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 16: { # 'K' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' 
+ 56: 0, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 49: { # 'L' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 2, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 20: { # 'M' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 46: { # 'N' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 42: { # 'O' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 
52: 1, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 2, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 48: { # 'P' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 44: { # 'R' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, + 35: { # 'S' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 2, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 2, 
# 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 31: { # 'T' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 51: { # 'U' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 38: { # 'V' + 23: 1, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 1, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 62: { # 'W' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, 
# 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 43: { # 'Y' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 56: { # 'Z' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 1: { # 'a' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 21: { # 'b' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 
'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 28: { # 'c' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 3, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 1, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 2, # 'ş' + }, + 12: { # 'd' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 2: { # 'e' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 
0, # 'ş' + }, + 18: { # 'f' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 1, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 27: { # 'g' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 25: { # 'h' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 3: { # 'i' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 
'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 24: { # 'j' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 10: { # 'k' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 5: { # 'l' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 13: { # 'm' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, 
# 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 4: { # 'n' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 15: { # 'o' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 2, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Ş' + 19: 2, # 'ş' + }, + 26: { # 'p' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 7: { # 'r' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' 
+ 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 8: { # 's' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 9: { # 't' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 14: { # 'u' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, 
# 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 32: { # 'v' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 57: { # 'w' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 1, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 58: { # 'x' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 11: { # 'y' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 
'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 22: { # 'z' + 23: 2, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 2, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 3, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 2, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 1, # 'Ş' + 19: 2, # 'ş' + }, + 63: { # '·' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 54: { # 'Ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 50: { # 'Ö' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 
'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 1, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 55: { # 'Ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 59: { # 'â' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 0, # 'ş' + }, + 33: { # 'ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 61: { # 'î' + 23: 0, # 'A' + 37: 0, # 'B' 
+ 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 34: { # 'ö' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 3, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ğ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 17: { # 'ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 30: { # 'ğ' + 23: 0, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 
2, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 2, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Ş' + 19: 1, # 'ş' + }, + 41: { # 'İ' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 1, # 'ş' + }, + 6: { # 'ı' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ğ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Ş' + 19: 0, # 'ş' + }, + 40: { # 'Ş' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 2, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 3, # 'f' + 27: 0, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 1, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ğ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Ş' + 19: 2, # 'ş' + }, + 19: { # 'ş' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 
2, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ğ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Ş' + 19: 1, # 'ş' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_9_TURKISH_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 255, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 255, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 255, # ' ' + 33: 255, # '!' + 34: 255, # '"' + 35: 255, # '#' + 36: 255, # '$' + 37: 255, # '%' + 38: 255, # '&' + 39: 255, # "'" + 40: 255, # '(' + 41: 255, # ')' + 42: 255, # '*' + 43: 255, # '+' + 44: 255, # ',' + 45: 255, # '-' + 46: 255, # '.' + 47: 255, # '/' + 48: 255, # '0' + 49: 255, # '1' + 50: 255, # '2' + 51: 255, # '3' + 52: 255, # '4' + 53: 255, # '5' + 54: 255, # '6' + 55: 255, # '7' + 56: 255, # '8' + 57: 255, # '9' + 58: 255, # ':' + 59: 255, # ';' + 60: 255, # '<' + 61: 255, # '=' + 62: 255, # '>' + 63: 255, # '?' 
+ 64: 255, # '@' + 65: 23, # 'A' + 66: 37, # 'B' + 67: 47, # 'C' + 68: 39, # 'D' + 69: 29, # 'E' + 70: 52, # 'F' + 71: 36, # 'G' + 72: 45, # 'H' + 73: 53, # 'I' + 74: 60, # 'J' + 75: 16, # 'K' + 76: 49, # 'L' + 77: 20, # 'M' + 78: 46, # 'N' + 79: 42, # 'O' + 80: 48, # 'P' + 81: 69, # 'Q' + 82: 44, # 'R' + 83: 35, # 'S' + 84: 31, # 'T' + 85: 51, # 'U' + 86: 38, # 'V' + 87: 62, # 'W' + 88: 65, # 'X' + 89: 43, # 'Y' + 90: 56, # 'Z' + 91: 255, # '[' + 92: 255, # '\\' + 93: 255, # ']' + 94: 255, # '^' + 95: 255, # '_' + 96: 255, # '`' + 97: 1, # 'a' + 98: 21, # 'b' + 99: 28, # 'c' + 100: 12, # 'd' + 101: 2, # 'e' + 102: 18, # 'f' + 103: 27, # 'g' + 104: 25, # 'h' + 105: 3, # 'i' + 106: 24, # 'j' + 107: 10, # 'k' + 108: 5, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 15, # 'o' + 112: 26, # 'p' + 113: 64, # 'q' + 114: 7, # 'r' + 115: 8, # 's' + 116: 9, # 't' + 117: 14, # 'u' + 118: 32, # 'v' + 119: 57, # 'w' + 120: 58, # 'x' + 121: 11, # 'y' + 122: 22, # 'z' + 123: 255, # '{' + 124: 255, # '|' + 125: 255, # '}' + 126: 255, # '~' + 127: 255, # '\x7f' + 128: 180, # '\x80' + 129: 179, # '\x81' + 130: 178, # '\x82' + 131: 177, # '\x83' + 132: 176, # '\x84' + 133: 175, # '\x85' + 134: 174, # '\x86' + 135: 173, # '\x87' + 136: 172, # '\x88' + 137: 171, # '\x89' + 138: 170, # '\x8a' + 139: 169, # '\x8b' + 140: 168, # '\x8c' + 141: 167, # '\x8d' + 142: 166, # '\x8e' + 143: 165, # '\x8f' + 144: 164, # '\x90' + 145: 163, # '\x91' + 146: 162, # '\x92' + 147: 161, # '\x93' + 148: 160, # '\x94' + 149: 159, # '\x95' + 150: 101, # '\x96' + 151: 158, # '\x97' + 152: 157, # '\x98' + 153: 156, # '\x99' + 154: 155, # '\x9a' + 155: 154, # '\x9b' + 156: 153, # '\x9c' + 157: 152, # '\x9d' + 158: 151, # '\x9e' + 159: 106, # '\x9f' + 160: 150, # '\xa0' + 161: 149, # '¡' + 162: 148, # '¢' + 163: 147, # '£' + 164: 146, # '¤' + 165: 145, # '¥' + 166: 144, # '¦' + 167: 100, # '§' + 168: 143, # '¨' + 169: 142, # '©' + 170: 141, # 'ª' + 171: 140, # '«' + 172: 139, # '¬' + 173: 138, # '\xad' + 174: 137, # '®' + 175: 136, # '¯' + 176: 94, # '°' + 177: 80, # '±' + 178: 93, # '²' + 179: 135, # '³' + 180: 105, # '´' + 181: 134, # 'µ' + 182: 133, # '¶' + 183: 63, # '·' + 184: 132, # '¸' + 185: 131, # '¹' + 186: 130, # 'º' + 187: 129, # '»' + 188: 128, # '¼' + 189: 127, # '½' + 190: 126, # '¾' + 191: 125, # '¿' + 192: 124, # 'À' + 193: 104, # 'Á' + 194: 73, # 'Â' + 195: 99, # 'Ã' + 196: 79, # 'Ä' + 197: 85, # 'Å' + 198: 123, # 'Æ' + 199: 54, # 'Ç' + 200: 122, # 'È' + 201: 98, # 'É' + 202: 92, # 'Ê' + 203: 121, # 'Ë' + 204: 120, # 'Ì' + 205: 91, # 'Í' + 206: 103, # 'Î' + 207: 119, # 'Ï' + 208: 68, # 'Ğ' + 209: 118, # 'Ñ' + 210: 117, # 'Ò' + 211: 97, # 'Ó' + 212: 116, # 'Ô' + 213: 115, # 'Õ' + 214: 50, # 'Ö' + 215: 90, # '×' + 216: 114, # 'Ø' + 217: 113, # 'Ù' + 218: 112, # 'Ú' + 219: 111, # 'Û' + 220: 55, # 'Ü' + 221: 41, # 'İ' + 222: 40, # 'Ş' + 223: 86, # 'ß' + 224: 89, # 'à' + 225: 70, # 'á' + 226: 59, # 'â' + 227: 78, # 'ã' + 228: 71, # 'ä' + 229: 82, # 'å' + 230: 88, # 'æ' + 231: 33, # 'ç' + 232: 77, # 'è' + 233: 66, # 'é' + 234: 84, # 'ê' + 235: 83, # 'ë' + 236: 110, # 'ì' + 237: 75, # 'í' + 238: 61, # 'î' + 239: 96, # 'ï' + 240: 30, # 'ğ' + 241: 67, # 'ñ' + 242: 109, # 'ò' + 243: 74, # 'ó' + 244: 87, # 'ô' + 245: 102, # 'õ' + 246: 34, # 'ö' + 247: 95, # '÷' + 248: 81, # 'ø' + 249: 108, # 'ù' + 250: 76, # 'ú' + 251: 72, # 'û' + 252: 17, # 'ü' + 253: 6, # 'ı' + 254: 19, # 'ş' + 255: 107, # 'ÿ' +} + +ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-9', + language='Turkish', + 
char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER, + language_model=TURKISH_LANG_MODEL, + typical_positive_ratio=0.97029, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş') + diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/latin1prober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # 
D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence = confidence * 0.73 + return confidence diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + return self.distribution_analyzer.get_confidence() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcsgroupprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcssm.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # 
a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 
6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 
0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..561c75b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc new file mode 100644 index 0000000..41fd6fd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/languages.py 
b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/languages.py new file mode 100644 index 0000000..e8e4469 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/metadata/languages.py @@ -0,0 +1,309 @@ +# -*- coding: utf-8 -*- +""" +Metadata about languages used by our model training code for our +SingleByteCharSetProbers. Could be used for other things in the future. + +This code is based on the language metadata from the uchardet project. +""" +from __future__ import absolute_import, print_function + +from string import ascii_letters + + +# TODO: Add Ukranian (KOI8-U) + +class Language(object): + """Metadata about a language useful for training models + + :ivar name: The human name for the language, in English. + :type name: str + :ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise, + or use another catalog as a last resort. + :type iso_code: str + :ivar use_ascii: Whether or not ASCII letters should be included in trained + models. + :type use_ascii: bool + :ivar charsets: The charsets we want to support and create data for. + :type charsets: list of str + :ivar alphabet: The characters in the language's alphabet. If `use_ascii` is + `True`, you only need to add those not in the ASCII set. + :type alphabet: str + :ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling + Wikipedia for training data. + :type wiki_start_pages: list of str + """ + def __init__(self, name=None, iso_code=None, use_ascii=True, charsets=None, + alphabet=None, wiki_start_pages=None): + super(Language, self).__init__() + self.name = name + self.iso_code = iso_code + self.use_ascii = use_ascii + self.charsets = charsets + if self.use_ascii: + if alphabet: + alphabet += ascii_letters + else: + alphabet = ascii_letters + elif not alphabet: + raise ValueError('Must supply alphabet if use_ascii is False') + self.alphabet = ''.join(sorted(set(alphabet))) if alphabet else None + self.wiki_start_pages = wiki_start_pages + + def __repr__(self): + return '{}({})'.format(self.__class__.__name__, + ', '.join('{}={!r}'.format(k, v) + for k, v in self.__dict__.items() + if not k.startswith('_'))) + + +LANGUAGES = {'Arabic': Language(name='Arabic', + iso_code='ar', + use_ascii=False, + # We only support encodings that use isolated + # forms, because the current recommendation is + # that the rendering system handles presentation + # forms. This means we purposefully skip IBM864. 
+ charsets=['ISO-8859-6', 'WINDOWS-1256', + 'CP720', 'CP864'], + alphabet=u'ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـفقكلمنهوىيًٌٍَُِّ', + wiki_start_pages=[u'الصفحة_الرئيسية']), + 'Belarusian': Language(name='Belarusian', + iso_code='be', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'IBM866', 'MacCyrillic'], + alphabet=(u'АБВГДЕЁЖЗІЙКЛМНОПРСТУЎФХЦЧШЫЬЭЮЯ' + u'абвгдеёжзійклмнопрстуўфхцчшыьэюяʼ'), + wiki_start_pages=[u'Галоўная_старонка']), + 'Bulgarian': Language(name='Bulgarian', + iso_code='bg', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'IBM855'], + alphabet=(u'АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯ' + u'абвгдежзийклмнопрстуфхцчшщъьюя'), + wiki_start_pages=[u'Начална_страница']), + 'Czech': Language(name='Czech', + iso_code='cz', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'áčďéěíňóřšťúůýžÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ', + wiki_start_pages=[u'Hlavní_strana']), + 'Danish': Language(name='Danish', + iso_code='da', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'æøåÆØÅ', + wiki_start_pages=[u'Forside']), + 'German': Language(name='German', + iso_code='de', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + alphabet=u'äöüßÄÖÜ', + wiki_start_pages=[u'Wikipedia:Hauptseite']), + 'Greek': Language(name='Greek', + iso_code='el', + use_ascii=False, + charsets=['ISO-8859-7', 'WINDOWS-1253'], + alphabet=(u'αβγδεζηθικλμνξοπρσςτυφχψωάέήίόύώ' + u'ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎΏ'), + wiki_start_pages=[u'Πύλη:Κύρια']), + 'English': Language(name='English', + iso_code='en', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + wiki_start_pages=[u'Main_Page']), + 'Esperanto': Language(name='Esperanto', + iso_code='eo', + # Q, W, X, and Y not used at all + use_ascii=False, + charsets=['ISO-8859-3'], + alphabet=(u'abcĉdefgĝhĥijĵklmnoprsŝtuŭvz' + u'ABCĈDEFGĜHĤIJĴKLMNOPRSŜTUŬVZ'), + wiki_start_pages=[u'Vikipedio:Ĉefpaĝo']), + 'Spanish': Language(name='Spanish', + iso_code='es', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ñáéíóúüÑÁÉÍÓÚÜ', + wiki_start_pages=[u'Wikipedia:Portada']), + 'Estonian': Language(name='Estonian', + iso_code='et', + use_ascii=False, + charsets=['ISO-8859-4', 'ISO-8859-13', + 'WINDOWS-1257'], + # C, F, Š, Q, W, X, Y, Z, Ž are only for + # loanwords + alphabet=(u'ABDEGHIJKLMNOPRSTUVÕÄÖÜ' + u'abdeghijklmnoprstuvõäöü'), + wiki_start_pages=[u'Esileht']), + 'Finnish': Language(name='Finnish', + iso_code='fi', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÅÄÖŠŽåäöšž', + wiki_start_pages=[u'Wikipedia:Etusivu']), + 'French': Language(name='French', + iso_code='fr', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ', + wiki_start_pages=[u'Wikipédia:Accueil_principal', + u'Bœuf (animal)']), + 'Hebrew': Language(name='Hebrew', + iso_code='he', + use_ascii=False, + charsets=['ISO-8859-8', 'WINDOWS-1255'], + alphabet=u'אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ', + wiki_start_pages=[u'עמוד_ראשי']), + 'Croatian': Language(name='Croatian', + iso_code='hr', + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcčćdđefghijklmnoprsštuvzž' + u'ABCČĆDĐEFGHIJKLMNOPRSŠTUVZŽ'), + wiki_start_pages=[u'Glavna_stranica']), + 'Hungarian': Language(name='Hungarian', + iso_code='hu', + # Q, W, X, Y are only used for foreign words. 
+ use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcdefghijklmnoprstuvzáéíóöőúüű' + u'ABCDEFGHIJKLMNOPRSTUVZÁÉÍÓÖŐÚÜŰ'), + wiki_start_pages=[u'Kezdőlap']), + 'Italian': Language(name='Italian', + iso_code='it', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÀÈÉÌÒÓÙàèéìòóù', + wiki_start_pages=[u'Pagina_principale']), + 'Lithuanian': Language(name='Lithuanian', + iso_code='lt', + use_ascii=False, + charsets=['ISO-8859-13', 'WINDOWS-1257', + 'ISO-8859-4'], + # Q, W, and X not used at all + alphabet=(u'AĄBCČDEĘĖFGHIĮYJKLMNOPRSŠTUŲŪVZŽ' + u'aąbcčdeęėfghiįyjklmnoprsštuųūvzž'), + wiki_start_pages=[u'Pagrindinis_puslapis']), + 'Latvian': Language(name='Latvian', + iso_code='lv', + use_ascii=False, + charsets=['ISO-8859-13', 'WINDOWS-1257', + 'ISO-8859-4'], + # Q, W, X, Y are only for loanwords + alphabet=(u'AĀBCČDEĒFGĢHIĪJKĶLĻMNŅOPRSŠTUŪVZŽ' + u'aābcčdeēfgģhiījkķlļmnņoprsštuūvzž'), + wiki_start_pages=[u'Sākumlapa']), + 'Macedonian': Language(name='Macedonian', + iso_code='mk', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'MacCyrillic', 'IBM855'], + alphabet=(u'АБВГДЃЕЖЗЅИЈКЛЉМНЊОПРСТЌУФХЦЧЏШ' + u'абвгдѓежзѕијклљмнњопрстќуфхцчџш'), + wiki_start_pages=[u'Главна_страница']), + 'Dutch': Language(name='Dutch', + iso_code='nl', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + wiki_start_pages=[u'Hoofdpagina']), + 'Polish': Language(name='Polish', + iso_code='pl', + # Q and X are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'AĄBCĆDEĘFGHIJKLŁMNŃOÓPRSŚTUWYZŹŻ' + u'aąbcćdeęfghijklłmnńoóprsśtuwyzźż'), + wiki_start_pages=[u'Wikipedia:Strona_główna']), + 'Portuguese': Language(name='Portuguese', + iso_code='pt', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú', + wiki_start_pages=[u'Wikipédia:Página_principal']), + 'Romanian': Language(name='Romanian', + iso_code='ro', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'ăâîșțĂÂÎȘȚ', + wiki_start_pages=[u'Pagina_principală']), + 'Russian': Language(name='Russian', + iso_code='ru', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'KOI8-R', 'MacCyrillic', 'IBM866', + 'IBM855'], + alphabet=(u'абвгдеёжзийклмнопрстуфхцчшщъыьэюя' + u'АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ'), + wiki_start_pages=[u'Заглавная_страница']), + 'Slovak': Language(name='Slovak', + iso_code='sk', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'áäčďéíĺľňóôŕšťúýžÁÄČĎÉÍĹĽŇÓÔŔŠŤÚÝŽ', + wiki_start_pages=[u'Hlavná_stránka']), + 'Slovene': Language(name='Slovene', + iso_code='sl', + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcčdefghijklmnoprsštuvzž' + u'ABCČDEFGHIJKLMNOPRSŠTUVZŽ'), + wiki_start_pages=[u'Glavna_stran']), + # Serbian can be written in both Latin and Cyrillic, but there's no + # simple way to get the Latin alphabet pages from Wikipedia through + # the API, so for now we just support Cyrillic. 
+ 'Serbian': Language(name='Serbian', + iso_code='sr', + alphabet=(u'АБВГДЂЕЖЗИЈКЛЉМНЊОПРСТЋУФХЦЧЏШ' + u'абвгдђежзијклљмнњопрстћуфхцчџш'), + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'MacCyrillic', 'IBM855'], + wiki_start_pages=[u'Главна_страна']), + 'Thai': Language(name='Thai', + iso_code='th', + use_ascii=False, + charsets=['ISO-8859-11', 'TIS-620', 'CP874'], + alphabet=u'กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛', + wiki_start_pages=[u'หน้าหลัก']), + 'Turkish': Language(name='Turkish', + iso_code='tr', + # Q, W, and X are not used by Turkish + use_ascii=False, + charsets=['ISO-8859-3', 'ISO-8859-9', + 'WINDOWS-1254'], + alphabet=(u'abcçdefgğhıijklmnoöprsştuüvyzâîû' + u'ABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZÂÎÛ'), + wiki_start_pages=[u'Ana_Sayfa']), + 'Vietnamese': Language(name='Vietnamese', + iso_code='vi', + use_ascii=False, + # Windows-1258 is the only common 8-bit + # Vietnamese encoding supported by Python. + # From Wikipedia: + # For systems that lack support for Unicode, + # dozens of 8-bit Vietnamese code pages are + # available.[1] The most common are VISCII + # (TCVN 5712:1993), VPS, and Windows-1258.[3] + # Where ASCII is required, such as when + # ensuring readability in plain text e-mail, + # Vietnamese letters are often encoded + # according to Vietnamese Quoted-Readable + # (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4] + # though usage of either variable-width + # scheme has declined dramatically following + # the adoption of Unicode on the World Wide + # Web. + charsets=['WINDOWS-1258'], + alphabet=(u'aăâbcdđeêghiklmnoôơpqrstuưvxy' + u'AĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXY'), + wiki_start_pages=[u'Chữ_Quốc_ngữ']), + } diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sbcharsetprober.py new file mode 100644 index 0000000..46ba835 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sbcharsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from collections import namedtuple + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +SingleByteCharSetModel = namedtuple('SingleByteCharSetModel', + ['charset_name', + 'language', + 'char_to_order_map', + 'language_model', + 'typical_positive_ratio', + 'keep_ascii_letters', + 'alphabet']) + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model.charset_name + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.language + + def feed(self, byte_str): + # TODO: Make filter_international_words keep things in self.alphabet + if not self._model.keep_ascii_letters: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model.char_to_order_map + language_model = self._model.language_model + for char in byte_str: + order = char_to_order_map.get(char, CharacterCategory.UNDEFINED) + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. + if order < CharacterCategory.CONTROL: + self._total_char += 1 + # TODO: Follow uchardet's lead and discount confidence for frequent + # control characters. 
+ # See https://github.com/BYVoid/uchardet/commit/55b4f23971db61 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + lm_cat = language_model[self._last_order][order] + else: + lm_cat = language_model[order][self._last_order] + self._seq_counters[lm_cat] += 1 + self._last_order = order + + charset_name = self._model.charset_name + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model.typical_positive_ratio) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..bdeef4e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sbcsgroupprober.py @@ -0,0 +1,83 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .hebrewprober import HebrewProber +from .langbulgarianmodel import (ISO_8859_5_BULGARIAN_MODEL, + WINDOWS_1251_BULGARIAN_MODEL) +from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL +from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL +# from .langhungarianmodel import (ISO_8859_2_HUNGARIAN_MODEL, +# WINDOWS_1250_HUNGARIAN_MODEL) +from .langrussianmodel import (IBM855_RUSSIAN_MODEL, IBM866_RUSSIAN_MODEL, + ISO_8859_5_RUSSIAN_MODEL, KOI8_R_RUSSIAN_MODEL, + MACCYRILLIC_RUSSIAN_MODEL, + WINDOWS_1251_RUSSIAN_MODEL) +from .langthaimodel import TIS_620_THAI_MODEL +from .langturkishmodel import ISO_8859_9_TURKISH_MODEL +from .sbcharsetprober import SingleByteCharSetProber + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, + False, hebrew_prober) + # TODO: See if using ISO-8859-8 Hebrew model works better here, since + # it's actually the visual one + visual_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, + True, hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, + visual_hebrew_prober) + # TODO: ORDER MATTERS HERE. I changed the order vs what was in master + # and several tests failed that did not before. Some thought + # should be put into the ordering, and we should consider making + # order not matter here, because that is very counter-intuitive. + self.probers = [ + SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL), + SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL), + SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL), + SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL), + SingleByteCharSetProber(IBM866_RUSSIAN_MODEL), + SingleByteCharSetProber(IBM855_RUSSIAN_MODEL), + SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL), + SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL), + SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL), + SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(ISO_8859_2_HUNGARIAN_MODEL), + # SingleByteCharSetProber(WINDOWS_1250_HUNGARIAN_MODEL), + SingleByteCharSetProber(TIS_620_THAI_MODEL), + SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL), + hebrew_prober, + logical_hebrew_prober, + visual_hebrew_prober, + ] + self.reset() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sjisprober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sjisprober.py new file mode 100644 index 0000000..9e29623 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/universaldetector.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/universaldetector.py new file mode 100644 index 0000000..055a8ac --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. 
+ + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. + """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. 
+ elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. + """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() <= logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + group_prober.charset_name, + group_prober.language, + group_prober.get_confidence()) + return self.result diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/utf8prober.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/utf8prober.py new file mode 100644 index 0000000..6c3196c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
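# A minimal sketch of the incremental feed()/close() flow described above,
# assuming the vendored chardet package is importable; the file name and chunk
# size are illustrative only.
from pip._vendor.chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
with open("unknown.txt", "rb") as handle:
    for chunk in iter(lambda: handle.read(4096), b""):
        detector.feed(chunk)
        if detector.done:        # a BOM or a confident prober can finish early
            break
result = detector.close()        # {'encoding': ..., 'confidence': ..., 'language': ...}
print(result)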
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/version.py b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/version.py new file mode 100644 index 0000000..70369b9 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "4.0.0" +VERSION = __version__.split('.') diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__init__.py new file mode 100644 index 0000000..b149ed7 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__init__.py @@ -0,0 +1,6 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
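# A small worked example of the UTF8Prober confidence formula above: until six
# multi-byte sequences have been seen, confidence is 1 - 0.99 * 0.5 ** n, after
# which it saturates at 0.99.
for n in range(0, 8):
    unlike = 0.99
    if n < 6:
        unlike *= 0.5 ** n
        confidence = 1.0 - unlike
    else:
        confidence = unlike
    print(n, round(confidence, 3))   # 0 -> 0.01, 3 -> ~0.876, 6+ -> 0.99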
+from .initialise import init, deinit, reinit, colorama_text +from .ansi import Fore, Back, Style, Cursor +from .ansitowin32 import AnsiToWin32 + +__version__ = '0.4.4' diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..ec19f43 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc new file mode 100644 index 0000000..340b364 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc new file mode 100644 index 0000000..81e8c9e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc new file mode 100644 index 0000000..aa1ace2 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc new file mode 100644 index 0000000..93addb5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc new file mode 100644 index 0000000..8ff69ff Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/ansi.py b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/ansi.py new file mode 100644 index 0000000..11ec695 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/ansi.py @@ -0,0 +1,102 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +''' +This module generates ANSI character codes to printing colors to terminals. +See: http://en.wikipedia.org/wiki/ANSI_escape_code +''' + +CSI = '\033[' +OSC = '\033]' +BEL = '\a' + + +def code_to_chars(code): + return CSI + str(code) + 'm' + +def set_title(title): + return OSC + '2;' + title + BEL + +def clear_screen(mode=2): + return CSI + str(mode) + 'J' + +def clear_line(mode=2): + return CSI + str(mode) + 'K' + + +class AnsiCodes(object): + def __init__(self): + # the subclasses declare class attributes which are numbers. 
+ # Upon instantiation we define instance attributes, which are the same + # as the class attributes but wrapped with the ANSI escape sequence + for name in dir(self): + if not name.startswith('_'): + value = getattr(self, name) + setattr(self, name, code_to_chars(value)) + + +class AnsiCursor(object): + def UP(self, n=1): + return CSI + str(n) + 'A' + def DOWN(self, n=1): + return CSI + str(n) + 'B' + def FORWARD(self, n=1): + return CSI + str(n) + 'C' + def BACK(self, n=1): + return CSI + str(n) + 'D' + def POS(self, x=1, y=1): + return CSI + str(y) + ';' + str(x) + 'H' + + +class AnsiFore(AnsiCodes): + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 + MAGENTA = 35 + CYAN = 36 + WHITE = 37 + RESET = 39 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 90 + LIGHTRED_EX = 91 + LIGHTGREEN_EX = 92 + LIGHTYELLOW_EX = 93 + LIGHTBLUE_EX = 94 + LIGHTMAGENTA_EX = 95 + LIGHTCYAN_EX = 96 + LIGHTWHITE_EX = 97 + + +class AnsiBack(AnsiCodes): + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 + MAGENTA = 45 + CYAN = 46 + WHITE = 47 + RESET = 49 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 100 + LIGHTRED_EX = 101 + LIGHTGREEN_EX = 102 + LIGHTYELLOW_EX = 103 + LIGHTBLUE_EX = 104 + LIGHTMAGENTA_EX = 105 + LIGHTCYAN_EX = 106 + LIGHTWHITE_EX = 107 + + +class AnsiStyle(AnsiCodes): + BRIGHT = 1 + DIM = 2 + NORMAL = 22 + RESET_ALL = 0 + +Fore = AnsiFore() +Back = AnsiBack() +Style = AnsiStyle() +Cursor = AnsiCursor() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/ansitowin32.py b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/ansitowin32.py new file mode 100644 index 0000000..6039a05 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/ansitowin32.py @@ -0,0 +1,258 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import re +import sys +import os + +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL +from .winterm import WinTerm, WinColor, WinStyle +from .win32 import windll, winapi_test + + +winterm = None +if windll is not None: + winterm = WinTerm() + + +class StreamWrapper(object): + ''' + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()', which is delegated to our + Converter instance. + ''' + def __init__(self, wrapped, converter): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. 
+ self.__wrapped = wrapped + self.__convertor = converter + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def __enter__(self, *args, **kwargs): + # special method lookup bypasses __getattr__/__getattribute__, see + # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit + # thus, contextlib magic methods are not proxied via __getattr__ + return self.__wrapped.__enter__(*args, **kwargs) + + def __exit__(self, *args, **kwargs): + return self.__wrapped.__exit__(*args, **kwargs) + + def write(self, text): + self.__convertor.write(text) + + def isatty(self): + stream = self.__wrapped + if 'PYCHARM_HOSTED' in os.environ: + if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): + return True + try: + stream_isatty = stream.isatty + except AttributeError: + return False + else: + return stream_isatty() + + @property + def closed(self): + stream = self.__wrapped + try: + return stream.closed + except AttributeError: + return True + + +class AnsiToWin32(object): + ''' + Implements a 'write()' method which, on Windows, will strip ANSI character + sequences from the text, and if outputting to a tty, will convert them into + win32 function calls. + ''' + ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer + ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command + + def __init__(self, wrapped, convert=None, strip=None, autoreset=False): + # The wrapped stream (normally sys.stdout or sys.stderr) + self.wrapped = wrapped + + # should we reset colors to defaults after every .write() + self.autoreset = autoreset + + # create the proxy wrapping our output stream + self.stream = StreamWrapper(wrapped, self) + + on_windows = os.name == 'nt' + # We test if the WinAPI works, because even if we are on Windows + # we may be using a terminal that doesn't support the WinAPI + # (e.g. Cygwin Terminal). In this case it's up to the terminal + # to support the ANSI codes. + conversion_supported = on_windows and winapi_test() + + # should we strip ANSI sequences from our output? + if strip is None: + strip = conversion_supported or (not self.stream.closed and not self.stream.isatty()) + self.strip = strip + + # should we should convert ANSI sequences into win32 calls? + if convert is None: + convert = conversion_supported and not self.stream.closed and self.stream.isatty() + self.convert = convert + + # dict of ansi codes to win32 functions and parameters + self.win32_calls = self.get_win32_calls() + + # are we wrapping stderr? + self.on_stderr = self.wrapped is sys.stderr + + def should_wrap(self): + ''' + True if this class is actually needed. If false, then the output + stream will not be affected, nor will win32 calls be issued, so + wrapping stdout is not actually required. 
This will generally be + False on non-Windows platforms, unless optional functionality like + autoreset has been requested using kwargs to init() + ''' + return self.convert or self.strip or self.autoreset + + def get_win32_calls(self): + if self.convert and winterm: + return { + AnsiStyle.RESET_ALL: (winterm.reset_all, ), + AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), + AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), + AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), + AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), + AnsiFore.RED: (winterm.fore, WinColor.RED), + AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), + AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), + AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), + AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), + AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), + AnsiFore.WHITE: (winterm.fore, WinColor.GREY), + AnsiFore.RESET: (winterm.fore, ), + AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), + AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), + AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), + AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), + AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), + AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), + AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), + AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), + AnsiBack.BLACK: (winterm.back, WinColor.BLACK), + AnsiBack.RED: (winterm.back, WinColor.RED), + AnsiBack.GREEN: (winterm.back, WinColor.GREEN), + AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), + AnsiBack.BLUE: (winterm.back, WinColor.BLUE), + AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), + AnsiBack.CYAN: (winterm.back, WinColor.CYAN), + AnsiBack.WHITE: (winterm.back, WinColor.GREY), + AnsiBack.RESET: (winterm.back, ), + AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), + AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), + AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), + AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), + AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), + AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), + AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), + AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), + } + return dict() + + def write(self, text): + if self.strip or self.convert: + self.write_and_convert(text) + else: + self.wrapped.write(text) + self.wrapped.flush() + if self.autoreset: + self.reset_all() + + + def reset_all(self): + if self.convert: + self.call_win32('m', (0,)) + elif not self.strip and not self.stream.closed: + self.wrapped.write(Style.RESET_ALL) + + + def write_and_convert(self, text): + ''' + Write the given text to our wrapped stream, stripping any ANSI + sequences from the text, and optionally converting them into win32 + calls. 
+ ''' + cursor = 0 + text = self.convert_osc(text) + for match in self.ANSI_CSI_RE.finditer(text): + start, end = match.span() + self.write_plain_text(text, cursor, start) + self.convert_ansi(*match.groups()) + cursor = end + self.write_plain_text(text, cursor, len(text)) + + + def write_plain_text(self, text, start, end): + if start < end: + self.wrapped.write(text[start:end]) + self.wrapped.flush() + + + def convert_ansi(self, paramstring, command): + if self.convert: + params = self.extract_params(command, paramstring) + self.call_win32(command, params) + + + def extract_params(self, command, paramstring): + if command in 'Hf': + params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) + while len(params) < 2: + # defaults: + params = params + (1,) + else: + params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) + if len(params) == 0: + # defaults: + if command in 'JKm': + params = (0,) + elif command in 'ABCD': + params = (1,) + + return params + + + def call_win32(self, command, params): + if command == 'm': + for param in params: + if param in self.win32_calls: + func_args = self.win32_calls[param] + func = func_args[0] + args = func_args[1:] + kwargs = dict(on_stderr=self.on_stderr) + func(*args, **kwargs) + elif command in 'J': + winterm.erase_screen(params[0], on_stderr=self.on_stderr) + elif command in 'K': + winterm.erase_line(params[0], on_stderr=self.on_stderr) + elif command in 'Hf': # cursor position - absolute + winterm.set_cursor_position(params, on_stderr=self.on_stderr) + elif command in 'ABCD': # cursor position - relative + n = params[0] + # A - up, B - down, C - forward, D - back + x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] + winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) + + + def convert_osc(self, text): + for match in self.ANSI_OSC_RE.finditer(text): + start, end = match.span() + text = text[:start] + text[end:] + paramstring, command = match.groups() + if command == BEL: + if paramstring.count(";") == 1: + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) + return text diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/initialise.py b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/initialise.py new file mode 100644 index 0000000..430d066 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/initialise.py @@ -0,0 +1,80 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
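# A minimal sketch of wrapping a stream with the AnsiToWin32 converter defined
# above; init() in initialise.py (the next file in this diff) does this
# automatically. Assumes the vendored colorama package is importable.
import sys

from pip._vendor.colorama.ansi import Fore, Style
from pip._vendor.colorama.ansitowin32 import AnsiToWin32

wrapper = AnsiToWin32(sys.stdout)
stream = wrapper.stream if wrapper.should_wrap() else sys.stdout
stream.write(Fore.GREEN + "ok" + Style.RESET_ALL + "\n")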
+import atexit +import contextlib +import sys + +from .ansitowin32 import AnsiToWin32 + + +orig_stdout = None +orig_stderr = None + +wrapped_stdout = None +wrapped_stderr = None + +atexit_done = False + + +def reset_all(): + if AnsiToWin32 is not None: # Issue #74: objects might become None at exit + AnsiToWin32(orig_stdout).reset_all() + + +def init(autoreset=False, convert=None, strip=None, wrap=True): + + if not wrap and any([autoreset, convert, strip]): + raise ValueError('wrap=False conflicts with any other arg=True') + + global wrapped_stdout, wrapped_stderr + global orig_stdout, orig_stderr + + orig_stdout = sys.stdout + orig_stderr = sys.stderr + + if sys.stdout is None: + wrapped_stdout = None + else: + sys.stdout = wrapped_stdout = \ + wrap_stream(orig_stdout, convert, strip, autoreset, wrap) + if sys.stderr is None: + wrapped_stderr = None + else: + sys.stderr = wrapped_stderr = \ + wrap_stream(orig_stderr, convert, strip, autoreset, wrap) + + global atexit_done + if not atexit_done: + atexit.register(reset_all) + atexit_done = True + + +def deinit(): + if orig_stdout is not None: + sys.stdout = orig_stdout + if orig_stderr is not None: + sys.stderr = orig_stderr + + +@contextlib.contextmanager +def colorama_text(*args, **kwargs): + init(*args, **kwargs) + try: + yield + finally: + deinit() + + +def reinit(): + if wrapped_stdout is not None: + sys.stdout = wrapped_stdout + if wrapped_stderr is not None: + sys.stderr = wrapped_stderr + + +def wrap_stream(stream, convert, strip, autoreset, wrap): + if wrap: + wrapper = AnsiToWin32(stream, + convert=convert, strip=strip, autoreset=autoreset) + if wrapper.should_wrap(): + stream = wrapper.stream + return stream diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/win32.py b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/win32.py new file mode 100644 index 0000000..c2d8360 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/win32.py @@ -0,0 +1,152 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
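# A minimal sketch of the public init()/deinit() flow defined above, written
# against a standalone `colorama` install; the copy vendored here lives under
# pip._vendor.colorama and is not meant to be imported directly by user code.
from colorama import Fore, deinit, init

init(autoreset=True)                       # wraps sys.stdout / sys.stderr where needed
print(Fore.RED + "something went wrong")   # autoreset restores default colours after each write
deinit()                                   # restores the original streams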
+ +# from winbase.h +STDOUT = -11 +STDERR = -12 + +try: + import ctypes + from ctypes import LibraryLoader + windll = LibraryLoader(ctypes.WinDLL) + from ctypes import wintypes +except (AttributeError, ImportError): + windll = None + SetConsoleTextAttribute = lambda *_: None + winapi_test = lambda *_: None +else: + from ctypes import byref, Structure, c_char, POINTER + + COORD = wintypes._COORD + + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + """struct in wincon.h.""" + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + def __str__(self): + return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( + self.dwSize.Y, self.dwSize.X + , self.dwCursorPosition.Y, self.dwCursorPosition.X + , self.wAttributes + , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right + , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X + ) + + _GetStdHandle = windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [ + wintypes.DWORD, + ] + _GetStdHandle.restype = wintypes.HANDLE + + _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute + _SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + ] + _SetConsoleTextAttribute.restype = wintypes.BOOL + + _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition + _SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + COORD, + ] + _SetConsoleCursorPosition.restype = wintypes.BOOL + + _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA + _FillConsoleOutputCharacterA.argtypes = [ + wintypes.HANDLE, + c_char, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputCharacterA.restype = wintypes.BOOL + + _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute + _FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputAttribute.restype = wintypes.BOOL + + _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW + _SetConsoleTitleW.argtypes = [ + wintypes.LPCWSTR + ] + _SetConsoleTitleW.restype = wintypes.BOOL + + def _winapi_test(handle): + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return bool(success) + + def winapi_test(): + return any(_winapi_test(h) for h in + (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) + + def GetConsoleScreenBufferInfo(stream_id=STDOUT): + handle = _GetStdHandle(stream_id) + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return csbi + + def SetConsoleTextAttribute(stream_id, attrs): + handle = _GetStdHandle(stream_id) + return _SetConsoleTextAttribute(handle, attrs) + + def SetConsoleCursorPosition(stream_id, position, adjust=True): + position = COORD(*position) + # If the position is out of range, do nothing. + if position.Y <= 0 or position.X <= 0: + return + # Adjust for Windows' SetConsoleCursorPosition: + # 1. being 0-based, while ANSI is 1-based. + # 2. expecting (x,y), while ANSI uses (y,x). 
+ adjusted_position = COORD(position.Y - 1, position.X - 1) + if adjust: + # Adjust for viewport's scroll position + sr = GetConsoleScreenBufferInfo(STDOUT).srWindow + adjusted_position.Y += sr.Top + adjusted_position.X += sr.Left + # Resume normal processing + handle = _GetStdHandle(stream_id) + return _SetConsoleCursorPosition(handle, adjusted_position) + + def FillConsoleOutputCharacter(stream_id, char, length, start): + handle = _GetStdHandle(stream_id) + char = c_char(char.encode()) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + success = _FillConsoleOutputCharacterA( + handle, char, length, start, byref(num_written)) + return num_written.value + + def FillConsoleOutputAttribute(stream_id, attr, length, start): + ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' + handle = _GetStdHandle(stream_id) + attribute = wintypes.WORD(attr) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + return _FillConsoleOutputAttribute( + handle, attribute, length, start, byref(num_written)) + + def SetConsoleTitle(title): + return _SetConsoleTitleW(title) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/winterm.py b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/winterm.py new file mode 100644 index 0000000..0fdb4ec --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/colorama/winterm.py @@ -0,0 +1,169 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from . import win32 + + +# from wincon.h +class WinColor(object): + BLACK = 0 + BLUE = 1 + GREEN = 2 + CYAN = 3 + RED = 4 + MAGENTA = 5 + YELLOW = 6 + GREY = 7 + +# from wincon.h +class WinStyle(object): + NORMAL = 0x00 # dim text, dim background + BRIGHT = 0x08 # bright text, dim background + BRIGHT_BACKGROUND = 0x80 # dim text, bright background + +class WinTerm(object): + + def __init__(self): + self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes + self.set_attrs(self._default) + self._default_fore = self._fore + self._default_back = self._back + self._default_style = self._style + # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. + # So that LIGHT_EX colors and BRIGHT style do not clobber each other, + # we track them separately, since LIGHT_EX is overwritten by Fore/Back + # and BRIGHT is overwritten by Style codes. 
+ self._light = 0 + + def get_attrs(self): + return self._fore + self._back * 16 + (self._style | self._light) + + def set_attrs(self, value): + self._fore = value & 7 + self._back = (value >> 4) & 7 + self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) + + def reset_all(self, on_stderr=None): + self.set_attrs(self._default) + self.set_console(attrs=self._default) + self._light = 0 + + def fore(self, fore=None, light=False, on_stderr=False): + if fore is None: + fore = self._default_fore + self._fore = fore + # Emulate LIGHT_EX with BRIGHT Style + if light: + self._light |= WinStyle.BRIGHT + else: + self._light &= ~WinStyle.BRIGHT + self.set_console(on_stderr=on_stderr) + + def back(self, back=None, light=False, on_stderr=False): + if back is None: + back = self._default_back + self._back = back + # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style + if light: + self._light |= WinStyle.BRIGHT_BACKGROUND + else: + self._light &= ~WinStyle.BRIGHT_BACKGROUND + self.set_console(on_stderr=on_stderr) + + def style(self, style=None, on_stderr=False): + if style is None: + style = self._default_style + self._style = style + self.set_console(on_stderr=on_stderr) + + def set_console(self, attrs=None, on_stderr=False): + if attrs is None: + attrs = self.get_attrs() + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleTextAttribute(handle, attrs) + + def get_position(self, handle): + position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition + # Because Windows coordinates are 0-based, + # and win32.SetConsoleCursorPosition expects 1-based. + position.X += 1 + position.Y += 1 + return position + + def set_cursor_position(self, position=None, on_stderr=False): + if position is None: + # I'm not currently tracking the position, so there is no default. + # position = self.get_position() + return + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleCursorPosition(handle, position) + + def cursor_adjust(self, x, y, on_stderr=False): + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + position = self.get_position(handle) + adjusted_position = (position.Y + y, position.X + x) + win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) + + def erase_screen(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the screen. + # 1 should clear from the cursor to the beginning of the screen. 
+ # 2 should clear the entire screen, and move cursor to (1,1) + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + # get the number of character cells in the current buffer + cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y + # get number of character cells before current cursor position + cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = cells_in_screen - cells_before_cursor + elif mode == 1: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_before_cursor + elif mode == 2: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_in_screen + else: + # invalid mode + return + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + if mode == 2: + # put the cursor where needed + win32.SetConsoleCursorPosition(handle, (1, 1)) + + def erase_line(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the line. + # 1 should clear from the cursor to the beginning of the line. + # 2 should clear the entire line. + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X + elif mode == 1: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwCursorPosition.X + elif mode == 2: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwSize.X + else: + # invalid mode + return + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + + def set_title(self, title): + win32.SetConsoleTitle(title) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__init__.py new file mode 100644 index 0000000..1154948 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__init__.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2019 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
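# A small worked example of the console-attribute packing used by WinTerm above:
# get_attrs() combines the parts as fore + back * 16 + style, and set_attrs()
# unpacks them with bit masks. Values follow WinColor/WinStyle defined earlier.
fore, back, style = 2, 0, 0x08        # GREEN text, BLACK background, BRIGHT
attrs = fore + back * 16 + style      # 0x0A
assert attrs & 7 == fore              # foreground recovered as in set_attrs()
assert (attrs >> 4) & 7 == back       # background recovered as in set_attrs()
print(hex(attrs))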
+# +import logging + +__version__ = '0.3.3' + +class DistlibException(Exception): + pass + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): + def handle(self, record): pass + def emit(self, record): pass + def createLock(self): self.lock = None + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..35199d3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc new file mode 100644 index 0000000..6186773 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-39.pyc new file mode 100644 index 0000000..6710a3f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-39.pyc new file mode 100644 index 0000000..fdd8fe5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc new file mode 100644 index 0000000..8a8801f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc new file mode 100644 index 0000000..4f4dbb3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc new file mode 100644 index 0000000..a97fd25 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc new file mode 100644 index 0000000..3f4c8bd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc new file mode 100644 index 0000000..9b6977d Binary files /dev/null 
and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc new file mode 100644 index 0000000..1c109a4 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-39.pyc new file mode 100644 index 0000000..e691110 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-39.pyc new file mode 100644 index 0000000..f0dcd61 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc new file mode 100644 index 0000000..85d38f3 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__init__.py new file mode 100644 index 0000000..f7dbf4c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__init__.py @@ -0,0 +1,6 @@ +"""Modules copied from Python 3 standard libraries, for internal use only. + +Individual classes and functions are found in d2._backport.misc. Intended +usage is to always import things missing from 3.1 from that module: the +built-in/stdlib objects will be used if found. 
+""" diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..8c5bec8 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc new file mode 100644 index 0000000..ef2a98e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc new file mode 100644 index 0000000..3825a86 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc new file mode 100644 index 0000000..ff32bbd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc new file mode 100644 index 0000000..5a8409c Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/misc.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/misc.py new file mode 100644 index 0000000..cfb318d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/misc.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +"""Backports for individual classes and functions.""" + +import os +import sys + +__all__ = ['cache_from_source', 'callable', 'fsencode'] + + +try: + from imp import cache_from_source +except ImportError: + def cache_from_source(py_file, debug=__debug__): + ext = debug and 'c' or 'o' + return py_file + ext + + +try: + callable = callable +except NameError: + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode +except AttributeError: + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, str): + return filename.encode(sys.getfilesystemencoding()) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/shutil.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/shutil.py new file mode 100644 index 0000000..10ed362 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/shutil.py @@ -0,0 +1,764 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Utility functions for copying and archiving files and directory trees. + +XXX The functions here don't copy the resource fork or other metadata on Mac. + +""" + +import os +import sys +import stat +from os.path import abspath +import fnmatch +try: + from collections.abc import Callable +except ImportError: + from collections import Callable +import errno +from . import tarfile + +try: + import bz2 + _BZ2_SUPPORTED = True +except ImportError: + _BZ2_SUPPORTED = False + +try: + from pwd import getpwnam +except ImportError: + getpwnam = None + +try: + from grp import getgrnam +except ImportError: + getgrnam = None + +__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", + "copytree", "move", "rmtree", "Error", "SpecialFileError", + "ExecError", "make_archive", "get_archive_formats", + "register_archive_format", "unregister_archive_format", + "get_unpack_formats", "register_unpack_format", + "unregister_unpack_format", "unpack_archive", "ignore_patterns"] + +class Error(EnvironmentError): + pass + +class SpecialFileError(EnvironmentError): + """Raised when trying to do a kind of operation (e.g. copying) which is + not supported on a special file (e.g. a named pipe)""" + +class ExecError(EnvironmentError): + """Raised when a command could not be executed""" + +class ReadError(EnvironmentError): + """Raised when an archive cannot be read""" + +class RegistryError(Exception): + """Raised when a registry operation with the archiving + and unpacking registries fails""" + + +try: + WindowsError +except NameError: + WindowsError = None + +def copyfileobj(fsrc, fdst, length=16*1024): + """copy data from file-like object fsrc to file-like object fdst""" + while 1: + buf = fsrc.read(length) + if not buf: + break + fdst.write(buf) + +def _samefile(src, dst): + # Macintosh, Unix. + if hasattr(os.path, 'samefile'): + try: + return os.path.samefile(src, dst) + except OSError: + return False + + # All other platforms: check for same pathname. 
+ return (os.path.normcase(os.path.abspath(src)) == + os.path.normcase(os.path.abspath(dst))) + +def copyfile(src, dst): + """Copy data from src to dst""" + if _samefile(src, dst): + raise Error("`%s` and `%s` are the same file" % (src, dst)) + + for fn in [src, dst]: + try: + st = os.stat(fn) + except OSError: + # File most likely does not exist + pass + else: + # XXX What about other special files? (sockets, devices...) + if stat.S_ISFIFO(st.st_mode): + raise SpecialFileError("`%s` is a named pipe" % fn) + + with open(src, 'rb') as fsrc: + with open(dst, 'wb') as fdst: + copyfileobj(fsrc, fdst) + +def copymode(src, dst): + """Copy mode bits from src to dst""" + if hasattr(os, 'chmod'): + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + os.chmod(dst, mode) + +def copystat(src, dst): + """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + if hasattr(os, 'utime'): + os.utime(dst, (st.st_atime, st.st_mtime)) + if hasattr(os, 'chmod'): + os.chmod(dst, mode) + if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): + try: + os.chflags(dst, st.st_flags) + except OSError as why: + if (not hasattr(errno, 'EOPNOTSUPP') or + why.errno != errno.EOPNOTSUPP): + raise + +def copy(src, dst): + """Copy data and mode bits ("cp src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copymode(src, dst) + +def copy2(src, dst): + """Copy data and all stat info ("cp -p src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copystat(src, dst) + +def ignore_patterns(*patterns): + """Function that can be used as copytree() ignore parameter. + + Patterns is a sequence of glob-style patterns + that are used to exclude files""" + def _ignore_patterns(path, names): + ignored_names = [] + for pattern in patterns: + ignored_names.extend(fnmatch.filter(names, pattern)) + return set(ignored_names) + return _ignore_patterns + +def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, + ignore_dangling_symlinks=False): + """Recursively copy a directory tree. + + The destination directory must not already exist. + If exception(s) occur, an Error is raised with a list of reasons. + + If the optional symlinks flag is true, symbolic links in the + source tree result in symbolic links in the destination tree; if + it is false, the contents of the files pointed to by symbolic + links are copied. If the file pointed by the symlink doesn't + exist, an exception will be added in the list of errors raised in + an Error exception at the end of the copy process. + + You can set the optional ignore_dangling_symlinks flag to true if you + want to silence this exception. Notice that this has no effect on + platforms that don't support os.symlink. + + The optional ignore argument is a callable. If given, it + is called with the `src` parameter, which is the directory + being visited by copytree(), and `names` which is the list of + `src` contents, as returned by os.listdir(): + + callable(src, names) -> ignored_names + + Since copytree() is called recursively, the callable will be + called once for each directory that is copied. It returns a + list of names relative to the `src` directory that should + not be copied. + + The optional copy_function argument is a callable that will be used + to copy each file. 
It will be called with the source path and the + destination path as arguments. By default, copy2() is used, but any + function that supports the same signature (like copy()) can be used. + + """ + names = os.listdir(src) + if ignore is not None: + ignored_names = ignore(src, names) + else: + ignored_names = set() + + os.makedirs(dst) + errors = [] + for name in names: + if name in ignored_names: + continue + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if os.path.islink(srcname): + linkto = os.readlink(srcname) + if symlinks: + os.symlink(linkto, dstname) + else: + # ignore dangling symlink if the flag is on + if not os.path.exists(linkto) and ignore_dangling_symlinks: + continue + # otherwise let the copy occurs. copy2 will raise an error + copy_function(srcname, dstname) + elif os.path.isdir(srcname): + copytree(srcname, dstname, symlinks, ignore, copy_function) + else: + # Will raise a SpecialFileError for unsupported file types + copy_function(srcname, dstname) + # catch the Error from the recursive copytree so that we can + # continue with other files + except Error as err: + errors.extend(err.args[0]) + except EnvironmentError as why: + errors.append((srcname, dstname, str(why))) + try: + copystat(src, dst) + except OSError as why: + if WindowsError is not None and isinstance(why, WindowsError): + # Copying file access times may fail on Windows + pass + else: + errors.extend((src, dst, str(why))) + if errors: + raise Error(errors) + +def rmtree(path, ignore_errors=False, onerror=None): + """Recursively delete a directory tree. + + If ignore_errors is set, errors are ignored; otherwise, if onerror + is set, it is called to handle the error with arguments (func, + path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path is the argument to that function that caused it to fail; and + exc_info is a tuple returned by sys.exc_info(). If ignore_errors + is false and onerror is None, an exception is raised. + + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + try: + if os.path.islink(path): + # symlinks to directories are forbidden, see bug #1669 + raise OSError("Cannot call rmtree on a symbolic link") + except OSError: + onerror(os.path.islink, path, sys.exc_info()) + # can't continue even if onerror hook returns + return + names = [] + try: + names = os.listdir(path) + except os.error: + onerror(os.listdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + rmtree(fullname, ignore_errors, onerror) + else: + try: + os.remove(fullname) + except os.error: + onerror(os.remove, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + + +def _basename(path): + # A basename() variant which first strips the trailing slash, if present. + # Thus we always get the last component of the path, even for directories. + return os.path.basename(path.rstrip(os.path.sep)) + +def move(src, dst): + """Recursively move a file or directory to another location. This is + similar to the Unix "mv" command. + + If the destination is a directory or a symlink to a directory, the source + is moved inside the directory. The destination path must not already + exist. + + If the destination already exists but is not a directory, it may be + overwritten depending on os.rename() semantics. 
+ + If the destination is on our current filesystem, then rename() is used. + Otherwise, src is copied to the destination and then removed. + A lot more could be done here... A look at a mv.c shows a lot of + the issues this implementation glosses over. + + """ + real_dst = dst + if os.path.isdir(dst): + if _samefile(src, dst): + # We might be on a case insensitive filesystem, + # perform the rename anyway. + os.rename(src, dst) + return + + real_dst = os.path.join(dst, _basename(src)) + if os.path.exists(real_dst): + raise Error("Destination path '%s' already exists" % real_dst) + try: + os.rename(src, real_dst) + except OSError: + if os.path.isdir(src): + if _destinsrc(src, dst): + raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) + copytree(src, real_dst, symlinks=True) + rmtree(src) + else: + copy2(src, real_dst) + os.unlink(src) + +def _destinsrc(src, dst): + src = abspath(src) + dst = abspath(dst) + if not src.endswith(os.path.sep): + src += os.path.sep + if not dst.endswith(os.path.sep): + dst += os.path.sep + return dst.startswith(src) + +def _get_gid(name): + """Returns a gid, given a group name.""" + if getgrnam is None or name is None: + return None + try: + result = getgrnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _get_uid(name): + """Returns an uid, given a user name.""" + if getpwnam is None or name is None: + return None + try: + result = getpwnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, + owner=None, group=None, logger=None): + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", or None. + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_name' + ".tar", possibly plus + the appropriate compression extension (".gz", or ".bz2"). + + Returns the output filename. 
+ """ + tar_compression = {'gzip': 'gz', None: ''} + compress_ext = {'gzip': '.gz'} + + if _BZ2_SUPPORTED: + tar_compression['bzip2'] = 'bz2' + compress_ext['bzip2'] = '.bz2' + + # flags for compression program, each element of list will be an argument + if compress is not None and compress not in compress_ext: + raise ValueError("bad value for 'compress', or compression format not " + "supported : {0}".format(compress)) + + archive_name = base_name + '.tar' + compress_ext.get(compress, '') + archive_dir = os.path.dirname(archive_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # creating the tarball + if logger is not None: + logger.info('Creating tar archive') + + uid = _get_uid(owner) + gid = _get_gid(group) + + def _set_uid_gid(tarinfo): + if gid is not None: + tarinfo.gid = gid + tarinfo.gname = group + if uid is not None: + tarinfo.uid = uid + tarinfo.uname = owner + return tarinfo + + if not dry_run: + tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) + try: + tar.add(base_dir, filter=_set_uid_gid) + finally: + tar.close() + + return archive_name + +def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): + # XXX see if we want to keep an external call here + if verbose: + zipoptions = "-r" + else: + zipoptions = "-rq" + from distutils.errors import DistutilsExecError + from distutils.spawn import spawn + try: + spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) + except DistutilsExecError: + # XXX really should distinguish between "couldn't find + # external 'zip' command" and "zip failed". + raise ExecError("unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility") % zip_filename + +def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): + """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises ExecError. Returns the name of the output zip + file. + """ + zip_filename = base_name + ".zip" + archive_dir = os.path.dirname(base_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # If zipfile module is not available, try spawning an external 'zip' + # command. 
+ try: + import zipfile + except ImportError: + zipfile = None + + if zipfile is None: + _call_external_zip(base_dir, zip_filename, verbose, dry_run) + else: + if logger is not None: + logger.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) + + if not dry_run: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) + + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zip.write(path, path) + if logger is not None: + logger.info("adding '%s'", path) + zip.close() + + return zip_filename + +_ARCHIVE_FORMATS = { + 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), + 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (_make_zipfile, [], "ZIP file"), + } + +if _BZ2_SUPPORTED: + _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], + "bzip2'ed tar-file") + +def get_archive_formats(): + """Returns a list of supported formats for archiving and unarchiving. + + Each element of the returned sequence is a tuple (name, description) + """ + formats = [(name, registry[2]) for name, registry in + _ARCHIVE_FORMATS.items()] + formats.sort() + return formats + +def register_archive_format(name, function, extra_args=None, description=''): + """Registers an archive format. + + name is the name of the format. function is the callable that will be + used to create archives. If provided, extra_args is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_archive_formats() function. + """ + if extra_args is None: + extra_args = [] + if not isinstance(function, Callable): + raise TypeError('The %s object is not callable' % function) + if not isinstance(extra_args, (tuple, list)): + raise TypeError('extra_args needs to be a sequence') + for element in extra_args: + if not isinstance(element, (tuple, list)) or len(element) !=2: + raise TypeError('extra_args elements are : (arg_name, value)') + + _ARCHIVE_FORMATS[name] = (function, extra_args, description) + +def unregister_archive_format(name): + del _ARCHIVE_FORMATS[name] + +def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, + dry_run=0, owner=None, group=None, logger=None): + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "bztar" + or "gztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. 
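A usage sketch for the archiving helpers above: make_archive() drives one of the registered _ARCHIVE_FORMATS entries, and the result can be read back with unpack_archive(), which is defined a little further down in this module. The snippet uses the standard-library shutil (same API as this backport) and an invented directory layout.

import os
import shutil
import tempfile

# A throwaway project tree to archive.
project = tempfile.mkdtemp()
os.mkdir(os.path.join(project, "src"))
with open(os.path.join(project, "src", "app.py"), "w") as f:
    f.write("print('hello')\n")

out = os.path.join(tempfile.mkdtemp(), "release")

# 'gztar' is a built-in format; the call chdirs into root_dir and
# archives base_dir relative to it.
archive = shutil.make_archive(out, "gztar", root_dir=project, base_dir="src")
print(archive)                        # .../release.tar.gz
print(shutil.get_archive_formats())   # [('bztar', ...), ('gztar', ...), ...]

# Round trip: the unpacker is chosen from the registered '.tar.gz' extension.
dest = tempfile.mkdtemp()
shutil.unpack_archive(archive, dest)
print(os.listdir(dest))               # ['src']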
+ """ + save_cwd = os.getcwd() + if root_dir is not None: + if logger is not None: + logger.debug("changing into '%s'", root_dir) + base_name = os.path.abspath(base_name) + if not dry_run: + os.chdir(root_dir) + + if base_dir is None: + base_dir = os.curdir + + kwargs = {'dry_run': dry_run, 'logger': logger} + + try: + format_info = _ARCHIVE_FORMATS[format] + except KeyError: + raise ValueError("unknown archive format '%s'" % format) + + func = format_info[0] + for arg, val in format_info[1]: + kwargs[arg] = val + + if format != 'zip': + kwargs['owner'] = owner + kwargs['group'] = group + + try: + filename = func(base_name, base_dir, **kwargs) + finally: + if root_dir is not None: + if logger is not None: + logger.debug("changing back to '%s'", save_cwd) + os.chdir(save_cwd) + + return filename + + +def get_unpack_formats(): + """Returns a list of supported formats for unpacking. + + Each element of the returned sequence is a tuple + (name, extensions, description) + """ + formats = [(name, info[0], info[3]) for name, info in + _UNPACK_FORMATS.items()] + formats.sort() + return formats + +def _check_unpack_options(extensions, function, extra_args): + """Checks what gets registered as an unpacker.""" + # first make sure no other unpacker is registered for this extension + existing_extensions = {} + for name, info in _UNPACK_FORMATS.items(): + for ext in info[0]: + existing_extensions[ext] = name + + for extension in extensions: + if extension in existing_extensions: + msg = '%s is already registered for "%s"' + raise RegistryError(msg % (extension, + existing_extensions[extension])) + + if not isinstance(function, Callable): + raise TypeError('The registered function must be a callable') + + +def register_unpack_format(name, extensions, function, extra_args=None, + description=''): + """Registers an unpack format. + + `name` is the name of the format. `extensions` is a list of extensions + corresponding to the format. + + `function` is the callable that will be + used to unpack archives. The callable will receive archives to unpack. + If it's unable to handle an archive, it needs to raise a ReadError + exception. + + If provided, `extra_args` is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_unpack_formats() function. + """ + if extra_args is None: + extra_args = [] + _check_unpack_options(extensions, function, extra_args) + _UNPACK_FORMATS[name] = extensions, function, extra_args, description + +def unregister_unpack_format(name): + """Removes the pack format from the registry.""" + del _UNPACK_FORMATS[name] + +def _ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def _unpack_zipfile(filename, extract_dir): + """Unpack zip `filename` to `extract_dir` + """ + try: + import zipfile + except ImportError: + raise ReadError('zlib not supported, cannot unpack this archive.') + + if not zipfile.is_zipfile(filename): + raise ReadError("%s is not a zip file" % filename) + + zip = zipfile.ZipFile(filename) + try: + for info in zip.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' 
in name: + continue + + target = os.path.join(extract_dir, *name.split('/')) + if not target: + continue + + _ensure_directory(target) + if not name.endswith('/'): + # file + data = zip.read(info.filename) + f = open(target, 'wb') + try: + f.write(data) + finally: + f.close() + del data + finally: + zip.close() + +def _unpack_tarfile(filename, extract_dir): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise ReadError( + "%s is not a compressed or uncompressed tar file" % filename) + try: + tarobj.extractall(extract_dir) + finally: + tarobj.close() + +_UNPACK_FORMATS = { + 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), + 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), + 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") + } + +if _BZ2_SUPPORTED: + _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], + "bzip2'ed tar-file") + +def _find_unpack_format(filename): + for name, info in _UNPACK_FORMATS.items(): + for extension in info[0]: + if filename.endswith(extension): + return name + return None + +def unpack_archive(filename, extract_dir=None, format=None): + """Unpack an archive. + + `filename` is the name of the archive. + + `extract_dir` is the name of the target directory, where the archive + is unpacked. If not provided, the current working directory is used. + + `format` is the archive format: one of "zip", "tar", or "gztar". Or any + other registered format. If not provided, unpack_archive will use the + filename extension and see if an unpacker was registered for that + extension. + + In case none is found, a ValueError is raised. + """ + if extract_dir is None: + extract_dir = os.getcwd() + + if format is not None: + try: + format_info = _UNPACK_FORMATS[format] + except KeyError: + raise ValueError("Unknown unpack format '{0}'".format(format)) + + func = format_info[1] + func(filename, extract_dir, **dict(format_info[2])) + else: + # we need to look at the registered unpackers supported extensions + format = _find_unpack_format(filename) + if format is None: + raise ReadError("Unknown archive format '{0}'".format(filename)) + + func = _UNPACK_FORMATS[format][1] + kwargs = dict(_UNPACK_FORMATS[format][2]) + func(filename, extract_dir, **kwargs) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg new file mode 100644 index 0000000..1746bd0 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg @@ -0,0 +1,84 @@ +[posix_prefix] +# Configuration directories. Some of these come straight out of the +# configure script. They are for implementing the other variables, not to +# be used directly in [resource_locations]. 
+confdir = /etc +datadir = /usr/share +libdir = /usr/lib +statedir = /var +# User resource directory +local = ~/.local/{distribution.name} + +stdlib = {base}/lib/python{py_version_short} +platstdlib = {platbase}/lib/python{py_version_short} +purelib = {base}/lib/python{py_version_short}/site-packages +platlib = {platbase}/lib/python{py_version_short}/site-packages +include = {base}/include/python{py_version_short}{abiflags} +platinclude = {platbase}/include/python{py_version_short}{abiflags} +data = {base} + +[posix_home] +stdlib = {base}/lib/python +platstdlib = {base}/lib/python +purelib = {base}/lib/python +platlib = {base}/lib/python +include = {base}/include/python +platinclude = {base}/include/python +scripts = {base}/bin +data = {base} + +[nt] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2_home] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[nt_user] +stdlib = {userbase}/Python{py_version_nodot} +platstdlib = {userbase}/Python{py_version_nodot} +purelib = {userbase}/Python{py_version_nodot}/site-packages +platlib = {userbase}/Python{py_version_nodot}/site-packages +include = {userbase}/Python{py_version_nodot}/Include +scripts = {userbase}/Scripts +data = {userbase} + +[posix_user] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[osx_framework_user] +stdlib = {userbase}/lib/python +platstdlib = {userbase}/lib/python +purelib = {userbase}/lib/python/site-packages +platlib = {userbase}/lib/python/site-packages +include = {userbase}/include +scripts = {userbase}/bin +data = {userbase} diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/sysconfig.py new file mode 100644 index 0000000..b470a37 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/sysconfig.py @@ -0,0 +1,786 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +"""Access to Python's configuration information.""" + +import codecs +import os +import re +import sys +from os.path import pardir, realpath +try: + import configparser +except ImportError: + import ConfigParser as configparser + + +__all__ = [ + 'get_config_h_filename', + 'get_config_var', + 'get_config_vars', + 'get_makefile_filename', + 'get_path', + 'get_path_names', + 'get_paths', + 'get_platform', + 'get_python_version', + 'get_scheme_names', + 'parse_config_h', +] + + +def _safe_realpath(path): + try: + return realpath(path) + except OSError: + return path + + +if sys.executable: + _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) +else: + # sys.executable can be empty if argv[0] has been changed and Python is + # unable to retrieve the real program name + _PROJECT_BASE = _safe_realpath(os.getcwd()) + +if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) +# PC/VS7.1 +if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) +# PC/AMD64 +if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) + + +def is_python_build(): + for fn in ("Setup.dist", "Setup.local"): + if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): + return True + return False + +_PYTHON_BUILD = is_python_build() + +_cfg_read = False + +def _ensure_cfg_read(): + global _cfg_read + if not _cfg_read: + from ..resources import finder + backport_package = __name__.rsplit('.', 1)[0] + _finder = finder(backport_package) + _cfgfile = _finder.find('sysconfig.cfg') + assert _cfgfile, 'sysconfig.cfg exists' + with _cfgfile.as_stream() as s: + _SCHEMES.readfp(s) + if _PYTHON_BUILD: + for scheme in ('posix_prefix', 'posix_home'): + _SCHEMES.set(scheme, 'include', '{srcdir}/Include') + _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') + + _cfg_read = True + + +_SCHEMES = configparser.RawConfigParser() +_VAR_REPL = re.compile(r'\{([^{]*?)\}') + +def _expand_globals(config): + _ensure_cfg_read() + if config.has_section('globals'): + globals = config.items('globals') + else: + globals = tuple() + + sections = config.sections() + for section in sections: + if section == 'globals': + continue + for option, value in globals: + if config.has_option(section, option): + continue + config.set(section, option, value) + config.remove_section('globals') + + # now expanding local variables defined in the cfg file + # + for section in config.sections(): + variables = dict(config.items(section)) + + def _replacer(matchobj): + name = matchobj.group(1) + if name in variables: + return variables[name] + return matchobj.group(0) + + for option, value in config.items(section): + config.set(section, option, _VAR_REPL.sub(_replacer, value)) + +#_expand_globals(_SCHEMES) + +_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] +_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] +_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] +_PREFIX = os.path.normpath(sys.prefix) +_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +_CONFIG_VARS = None +_USER_BASE = None + + +def _subst_vars(path, local_vars): + """In the string `path`, replace tokens like {some.thing} with the + corresponding value from the map `local_vars`. + + If there is no corresponding value, leave the token unchanged. 
+ """ + def _replacer(matchobj): + name = matchobj.group(1) + if name in local_vars: + return local_vars[name] + elif name in os.environ: + return os.environ[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, path) + + +def _extend_dict(target_dict, other_dict): + target_keys = target_dict.keys() + for key, value in other_dict.items(): + if key in target_keys: + continue + target_dict[key] = value + + +def _expand_vars(scheme, vars): + res = {} + if vars is None: + vars = {} + _extend_dict(vars, get_config_vars()) + + for key, value in _SCHEMES.items(scheme): + if os.name in ('posix', 'nt'): + value = os.path.expanduser(value) + res[key] = os.path.normpath(_subst_vars(value, vars)) + return res + + +def format_value(value, vars): + def _replacer(matchobj): + name = matchobj.group(1) + if name in vars: + return vars[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, value) + + +def _get_default_scheme(): + if os.name == 'posix': + # the default scheme for posix is posix_prefix + return 'posix_prefix' + return os.name + + +def _getuserbase(): + env_base = os.environ.get("PYTHONUSERBASE", None) + + def joinuser(*args): + return os.path.expanduser(os.path.join(*args)) + + # what about 'os2emx', 'riscos' ? + if os.name == "nt": + base = os.environ.get("APPDATA") or "~" + if env_base: + return env_base + else: + return joinuser(base, "Python") + + if sys.platform == "darwin": + framework = get_config_var("PYTHONFRAMEWORK") + if framework: + if env_base: + return env_base + else: + return joinuser("~", "Library", framework, "%d.%d" % + sys.version_info[:2]) + + if env_base: + return env_base + else: + return joinuser("~", ".local") + + +def _parse_makefile(filename, vars=None): + """Parse a Makefile-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + # Regexes needed for parsing Makefile (and similar syntaxes, + # like old-style Setup files). + _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") + _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") + _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + + if vars is None: + vars = {} + done = {} + notdone = {} + + with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: + lines = f.readlines() + + for line in lines: + if line.startswith('#') or line.strip() == '': + continue + m = _variable_rx.match(line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # do variable interpolation here + variables = list(notdone.keys()) + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. 
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + while len(variables) > 0: + for name in tuple(variables): + value = notdone[name] + m = _findvar1_rx.search(value) or _findvar2_rx.search(value) + if m is not None: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if (name.startswith('PY_') and + name[3:] in renamed_variables): + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + + else: + done[n] = item = "" + + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: + value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + variables.remove(name) + + if (name.startswith('PY_') and + name[3:] in renamed_variables): + + name = name[3:] + if name not in done: + done[name] = value + + else: + # bogus variable reference (e.g. "prefix=$/opt/python"); + # just drop it since we can't deal + done[name] = value + variables.remove(name) + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + vars.update(done) + return vars + + +def get_makefile_filename(): + """Return the path of the Makefile.""" + if _PYTHON_BUILD: + return os.path.join(_PROJECT_BASE, "Makefile") + if hasattr(sys, 'abiflags'): + config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) + else: + config_dir_name = 'config' + return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" + # load the installed Makefile: + makefile = get_makefile_filename() + try: + _parse_makefile(makefile, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % makefile + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # load the installed pyconfig.h: + config_h = get_config_h_filename() + try: + with open(config_h) as f: + parse_config_h(f, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % config_h + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # On AIX, there are wrong paths to the linker scripts in the Makefile + # -- these paths are relative to the Python source, but when installed + # the scripts are in another directory. + if _PYTHON_BUILD: + vars['LDSHARED'] = vars['BLDSHARED'] + + +def _init_non_posix(vars): + """Initialize the module as appropriate for NT""" + # set basic install directories + vars['LIBDEST'] = get_path('stdlib') + vars['BINLIBDEST'] = get_path('platstdlib') + vars['INCLUDEPY'] = get_path('include') + vars['SO'] = '.pyd' + vars['EXE'] = '.exe' + vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT + vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) + +# +# public APIs +# + + +def parse_config_h(fp, vars=None): + """Parse a config.h-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. 
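Because parse_config_h() mirrors the helper of the same name in the standard-library sysconfig, its parsing rules can be demonstrated with an in-memory file; the header lines below are invented.

import io
import sysconfig   # the standard-library module exposes the same helper

header = io.StringIO(
    "#define HAVE_UNISTD_H 1\n"
    "/* #undef HAVE_FOO */\n"
    '#define VERSION "3.9"\n'
)
vars = sysconfig.parse_config_h(header)
# Defined names become ints where possible, #undef entries become 0,
# and string values keep their surrounding quotes.
print(vars)   # {'HAVE_UNISTD_H': 1, 'HAVE_FOO': 0, 'VERSION': '"3.9"'}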
+ """ + if vars is None: + vars = {} + define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") + undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") + + while True: + line = fp.readline() + if not line: + break + m = define_rx.match(line) + if m: + n, v = m.group(1, 2) + try: + v = int(v) + except ValueError: + pass + vars[n] = v + else: + m = undef_rx.match(line) + if m: + vars[m.group(1)] = 0 + return vars + + +def get_config_h_filename(): + """Return the path of pyconfig.h.""" + if _PYTHON_BUILD: + if os.name == "nt": + inc_dir = os.path.join(_PROJECT_BASE, "PC") + else: + inc_dir = _PROJECT_BASE + else: + inc_dir = get_path('platinclude') + return os.path.join(inc_dir, 'pyconfig.h') + + +def get_scheme_names(): + """Return a tuple containing the schemes names.""" + return tuple(sorted(_SCHEMES.sections())) + + +def get_path_names(): + """Return a tuple containing the paths names.""" + # xxx see if we want a static list + return _SCHEMES.options('posix_prefix') + + +def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): + """Return a mapping containing an install scheme. + + ``scheme`` is the install scheme name. If not provided, it will + return the default scheme for the current platform. + """ + _ensure_cfg_read() + if expand: + return _expand_vars(scheme, vars) + else: + return dict(_SCHEMES.items(scheme)) + + +def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): + """Return a path corresponding to the scheme. + + ``scheme`` is the install scheme name. + """ + return get_paths(scheme, vars, expand)[name] + + +def get_config_vars(*args): + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. + + On Unix, this means every variable defined in Python's installed Makefile; + On Windows and Mac OS it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ + global _CONFIG_VARS + if _CONFIG_VARS is None: + _CONFIG_VARS = {} + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # distutils2 module. + _CONFIG_VARS['prefix'] = _PREFIX + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _CONFIG_VARS['py_version'] = _PY_VERSION + _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT + _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] + _CONFIG_VARS['base'] = _PREFIX + _CONFIG_VARS['platbase'] = _EXEC_PREFIX + _CONFIG_VARS['projectbase'] = _PROJECT_BASE + try: + _CONFIG_VARS['abiflags'] = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + _CONFIG_VARS['abiflags'] = '' + + if os.name in ('nt', 'os2'): + _init_non_posix(_CONFIG_VARS) + if os.name == 'posix': + _init_posix(_CONFIG_VARS) + # Setting 'userbase' is done below the call to the + # init function to enable using 'get_config_var' in + # the init-function. + if sys.version >= '2.6': + _CONFIG_VARS['userbase'] = _getuserbase() + + if 'srcdir' not in _CONFIG_VARS: + _CONFIG_VARS['srcdir'] = _PROJECT_BASE + else: + _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) + + # Convert srcdir into an absolute path if it appears necessary. + # Normally it is relative to the build directory. However, during + # testing, for example, we might be running a non-installed python + # from a different directory. 
+ if _PYTHON_BUILD and os.name == "posix": + base = _PROJECT_BASE + try: + cwd = os.getcwd() + except OSError: + cwd = None + if (not os.path.isabs(_CONFIG_VARS['srcdir']) and + base != cwd): + # srcdir is relative and we are not in the same directory + # as the executable. Assume executable is in the build + # directory and make srcdir absolute. + srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) + _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) + + if sys.platform == 'darwin': + kernel_version = os.uname()[2] # Kernel version (8.4.3) + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # On Mac OS X before 10.4, check if -arch and -isysroot + # are in CFLAGS or LDFLAGS and remove them if they are. + # This is needed when building extensions on a 10.3 system + # using a universal build of python. + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = re.sub('-isysroot [^ \t]*', ' ', flags) + _CONFIG_VARS[key] = flags + else: + # Allow the user to override the architecture flags using + # an environment variable. + # NOTE: This name was introduced by Apple in OSX 10.5 and + # is used by several scripting languages distributed with + # that OS release. + if 'ARCHFLAGS' in os.environ: + arch = os.environ['ARCHFLAGS'] + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = flags + ' ' + arch + _CONFIG_VARS[key] = flags + + # If we're on OSX 10.5 or later and the user tries to + # compiles an extension using an SDK that is not present + # on the current machine it is better to not use an SDK + # than to fail. + # + # The major usecase for this is users using a Python.org + # binary installer on OSX 10.6: that installer uses + # the 10.4u SDK, but that SDK is not installed by default + # when you install Xcode. + # + CFLAGS = _CONFIG_VARS.get('CFLAGS', '') + m = re.search(r'-isysroot\s+(\S+)', CFLAGS) + if m is not None: + sdk = m.group(1) + if not os.path.exists(sdk): + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) + _CONFIG_VARS[key] = flags + + if args: + vals = [] + for name in args: + vals.append(_CONFIG_VARS.get(name)) + return vals + else: + return _CONFIG_VARS + + +def get_config_var(name): + """Return the value of a single variable using the dictionary returned by + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ + return get_config_vars().get(name) + + +def get_platform(): + """Return a string that identifies the current platform. + + This is used mainly to distinguish platform-specific build directories and + platform-specific built distributions. Typically includes the OS name + and version and the architecture (as supplied by 'os.uname()'), + although the exact information included depends on the OS; eg. for IRIX + the architecture isn't particularly important (IRIX only runs on SGI + hardware), but for Linux the kernel version isn't particularly + important. + + Examples of returned values: + linux-i586 + linux-alpha (?) 
+ solaris-2.6-sun4u + irix-5.3 + irix64-6.2 + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win-ia64 (64bit Windows on Itanium) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + """ + if os.name == 'nt': + # sniff sys.version for architecture. + prefix = " bit (" + i = sys.version.find(prefix) + if i == -1: + return sys.platform + j = sys.version.find(")", i) + look = sys.version[i+len(prefix):j].lower() + if look == 'amd64': + return 'win-amd64' + if look == 'itanium': + return 'win-ia64' + return sys.platform + + if os.name != "posix" or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + osname, host, release, version, machine = os.uname() + + # Convert the OS name to lowercase, remove '/' characters + # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_') + machine = machine.replace('/', '-') + + if osname[:5] == "linux": + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + elif osname[:5] == "sunos": + if release[0] >= "5": # SunOS 5 == Solaris 2 + osname = "solaris" + release = "%d.%s" % (int(release[0]) - 3, release[2:]) + # fall through to standard osname-release-machine representation + elif osname[:4] == "irix": # could be "irix64"! + return "%s-%s" % (osname, release) + elif osname[:3] == "aix": + return "%s-%s.%s" % (osname, version, release) + elif osname[:6] == "cygwin": + osname = "cygwin" + rel_re = re.compile(r'[\d.]+') + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == "darwin": + # + # For our purposes, we'll assume that the system version from + # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set + # to. This makes the compatibility story a bit more sane because the + # machine is going to compile and link as if it were + # MACOSX_DEPLOYMENT_TARGET. + cfgvars = get_config_vars() + macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') + + if True: + # Always calculate the release of the running machine, + # needed to determine if we can build fat binaries or not. + + macrelease = macver + # Get the system version. Reading this plist is a documented + # way to get the system version (see the documentation for + # the Gestalt Manager) + try: + f = open('/System/Library/CoreServices/SystemVersion.plist') + except IOError: + # We're on a plain darwin box, fall back to the default + # behaviour. + pass + else: + try: + m = re.search(r'ProductUserVisibleVersion\s*' + r'(.*?)', f.read()) + finally: + f.close() + if m is not None: + macrelease = '.'.join(m.group(1).split('.')[:2]) + # else: fall back to the default behaviour + + if not macver: + macver = macrelease + + if macver: + release = macver + osname = "macosx" + + if ((macrelease + '.') >= '10.4.' and + '-arch' in get_config_vars().get('CFLAGS', '').strip()): + # The universal build will build fat binaries, but not on + # systems before 10.4 + # + # Try to detect 4-way universal builds, those have machine-type + # 'universal' instead of 'fat'. 
+ + machine = 'fat' + cflags = get_config_vars().get('CFLAGS') + + archs = re.findall(r'-arch\s+(\S+)', cflags) + archs = tuple(sorted(set(archs))) + + if len(archs) == 1: + machine = archs[0] + elif archs == ('i386', 'ppc'): + machine = 'fat' + elif archs == ('i386', 'x86_64'): + machine = 'intel' + elif archs == ('i386', 'ppc', 'x86_64'): + machine = 'fat3' + elif archs == ('ppc64', 'x86_64'): + machine = 'fat64' + elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): + machine = 'universal' + else: + raise ValueError( + "Don't know machine value for archs=%r" % (archs,)) + + elif machine == 'i386': + # On OSX the machine type returned by uname is always the + # 32-bit variant, even if the executable architecture is + # the 64-bit variant + if sys.maxsize >= 2**32: + machine = 'x86_64' + + elif machine in ('PowerPC', 'Power_Macintosh'): + # Pick a sane name for the PPC architecture. + # See 'i386' case + if sys.maxsize >= 2**32: + machine = 'ppc64' + else: + machine = 'ppc' + + return "%s-%s-%s" % (osname, release, machine) + + +def get_python_version(): + return _PY_VERSION_SHORT + + +def _print_dict(title, data): + for index, (key, value) in enumerate(sorted(data.items())): + if index == 0: + print('%s: ' % (title)) + print('\t%s = "%s"' % (key, value)) + + +def _main(): + """Display all information sysconfig detains.""" + print('Platform: "%s"' % get_platform()) + print('Python version: "%s"' % get_python_version()) + print('Current installation scheme: "%s"' % _get_default_scheme()) + print() + _print_dict('Paths', get_paths()) + print() + _print_dict('Variables', get_config_vars()) + + +if __name__ == '__main__': + _main() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/tarfile.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/tarfile.py new file mode 100644 index 0000000..d66d856 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/_backport/tarfile.py @@ -0,0 +1,2607 @@ +#------------------------------------------------------------------- +# tarfile.py +#------------------------------------------------------------------- +# Copyright (C) 2002 Lars Gustaebel +# All rights reserved. +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +from __future__ import print_function + +"""Read from and write to tar format archives. 
+""" + +__version__ = "$Revision$" + +version = "0.9.0" +__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" +__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" +__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" +__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." + +#--------- +# Imports +#--------- +import sys +import os +import stat +import errno +import time +import struct +import copy +import re + +try: + import grp, pwd +except ImportError: + grp = pwd = None + +# os.symlink on Windows prior to 6.0 raises NotImplementedError +symlink_exception = (AttributeError, NotImplementedError) +try: + # WindowsError (1314) will be raised if the caller does not hold the + # SeCreateSymbolicLinkPrivilege privilege + symlink_exception += (WindowsError,) +except NameError: + pass + +# from tarfile import * +__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] + +if sys.version_info[0] < 3: + import __builtin__ as builtins +else: + import builtins + +_open = builtins.open # Since 'open' is TarFile.open + +#--------------------------------------------------------- +# tar constants +#--------------------------------------------------------- +NUL = b"\0" # the null character +BLOCKSIZE = 512 # length of processing blocks +RECORDSIZE = BLOCKSIZE * 20 # length of records +GNU_MAGIC = b"ustar \0" # magic gnu tar string +POSIX_MAGIC = b"ustar\x0000" # magic posix tar string + +LENGTH_NAME = 100 # maximum length of a filename +LENGTH_LINK = 100 # maximum length of a linkname +LENGTH_PREFIX = 155 # maximum length of the prefix field + +REGTYPE = b"0" # regular file +AREGTYPE = b"\0" # regular file +LNKTYPE = b"1" # link (inside tarfile) +SYMTYPE = b"2" # symbolic link +CHRTYPE = b"3" # character special device +BLKTYPE = b"4" # block special device +DIRTYPE = b"5" # directory +FIFOTYPE = b"6" # fifo special device +CONTTYPE = b"7" # contiguous file + +GNUTYPE_LONGNAME = b"L" # GNU tar longname +GNUTYPE_LONGLINK = b"K" # GNU tar longlink +GNUTYPE_SPARSE = b"S" # GNU tar sparse file + +XHDTYPE = b"x" # POSIX.1-2001 extended header +XGLTYPE = b"g" # POSIX.1-2001 global header +SOLARIS_XHDTYPE = b"X" # Solaris extended header + +USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format +GNU_FORMAT = 1 # GNU tar format +PAX_FORMAT = 2 # POSIX.1-2001 (pax) format +DEFAULT_FORMAT = GNU_FORMAT + +#--------------------------------------------------------- +# tarfile constants +#--------------------------------------------------------- +# File types that tarfile supports: +SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, + SYMTYPE, DIRTYPE, FIFOTYPE, + CONTTYPE, CHRTYPE, BLKTYPE, + GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# File types that will be treated as a regular file. +REGULAR_TYPES = (REGTYPE, AREGTYPE, + CONTTYPE, GNUTYPE_SPARSE) + +# File types that are part of the GNU tar format. +GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# Fields from a pax header that override a TarInfo attribute. +PAX_FIELDS = ("path", "linkpath", "size", "mtime", + "uid", "gid", "uname", "gname") + +# Fields from a pax header that are affected by hdrcharset. +PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) + +# Fields in a pax header that are numbers, all other fields +# are treated as strings. 
+PAX_NUMBER_FIELDS = { + "atime": float, + "ctime": float, + "mtime": float, + "uid": int, + "gid": int, + "size": int +} + +#--------------------------------------------------------- +# Bits used in the mode field, values in octal. +#--------------------------------------------------------- +S_IFLNK = 0o120000 # symbolic link +S_IFREG = 0o100000 # regular file +S_IFBLK = 0o060000 # block device +S_IFDIR = 0o040000 # directory +S_IFCHR = 0o020000 # character device +S_IFIFO = 0o010000 # fifo + +TSUID = 0o4000 # set UID on execution +TSGID = 0o2000 # set GID on execution +TSVTX = 0o1000 # reserved + +TUREAD = 0o400 # read by owner +TUWRITE = 0o200 # write by owner +TUEXEC = 0o100 # execute/search by owner +TGREAD = 0o040 # read by group +TGWRITE = 0o020 # write by group +TGEXEC = 0o010 # execute/search by group +TOREAD = 0o004 # read by other +TOWRITE = 0o002 # write by other +TOEXEC = 0o001 # execute/search by other + +#--------------------------------------------------------- +# initialization +#--------------------------------------------------------- +if os.name in ("nt", "ce"): + ENCODING = "utf-8" +else: + ENCODING = sys.getfilesystemencoding() + +#--------------------------------------------------------- +# Some useful functions +#--------------------------------------------------------- + +def stn(s, length, encoding, errors): + """Convert a string to a null-terminated bytes object. + """ + s = s.encode(encoding, errors) + return s[:length] + (length - len(s)) * NUL + +def nts(s, encoding, errors): + """Convert a null-terminated bytes object to a string. + """ + p = s.find(b"\0") + if p != -1: + s = s[:p] + return s.decode(encoding, errors) + +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] != chr(0o200): + try: + n = int(nts(s, "ascii", "strict") or "0", 8) + except ValueError: + raise InvalidHeaderError("invalid header") + else: + n = 0 + for i in range(len(s) - 1): + n <<= 8 + n += ord(s[i + 1]) + return n + +def itn(n, digits=8, format=DEFAULT_FORMAT): + """Convert a python number to a number field. + """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0o200 byte indicates this particular + # encoding, the following digits-1 bytes are a big-endian + # representation. This allows values up to (256**(digits-1))-1. + if 0 <= n < 8 ** (digits - 1): + s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL + else: + if format != GNU_FORMAT or n >= 256 ** (digits - 1): + raise ValueError("overflow in number field") + + if n < 0: + # XXX We mimic GNU tar's behaviour with negative numbers, + # this could raise OverflowError. + n = struct.unpack("L", struct.pack("l", n))[0] + + s = bytearray() + for i in range(digits - 1): + s.insert(0, n & 0o377) + n >>= 8 + s.insert(0, 0o200) + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. 
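To make the two number encodings that itn()/nti() switch between more tangible, a short standalone sketch; it re-derives the header bytes instead of importing the vendored helpers, so the function names below are ours.

# Octal form (POSIX ustar): ASCII octal digits plus a NUL terminator,
# usable while n < 8**(digits - 1).
def octal_field(n, digits=8):
    return ("%0*o" % (digits - 1, n)).encode("ascii") + b"\0"

print(octal_field(0o755))         # b'0000755\x00'
print(octal_field(1000000, 12))   # b'00003641100\x00'

# GNU base-256 form: a leading 0o200 marker byte followed by the value in
# big-endian bytes, used once the number no longer fits the octal field.
def base256_field(n, digits=12):
    out = bytearray()
    for _ in range(digits - 1):
        out.insert(0, n & 0o377)
        n >>= 8
    out.insert(0, 0o200)
    return bytes(out)

print(base256_field(8 ** 11))     # starts with b'\x80'; 8**11 is one past the octal maximum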
+ """ + unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) + signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) + return unsigned_chksum, signed_chksum + +def copyfileobj(src, dst, length=None): + """Copy length bytes from fileobj src to fileobj dst. + If length is None, copy the entire content. + """ + if length == 0: + return + if length is None: + while True: + buf = src.read(16*1024) + if not buf: + break + dst.write(buf) + return + + BUFSIZE = 16 * 1024 + blocks, remainder = divmod(length, BUFSIZE) + for b in range(blocks): + buf = src.read(BUFSIZE) + if len(buf) < BUFSIZE: + raise IOError("end of file reached") + dst.write(buf) + + if remainder != 0: + buf = src.read(remainder) + if len(buf) < remainder: + raise IOError("end of file reached") + dst.write(buf) + return + +filemode_table = ( + ((S_IFLNK, "l"), + (S_IFREG, "-"), + (S_IFBLK, "b"), + (S_IFDIR, "d"), + (S_IFCHR, "c"), + (S_IFIFO, "p")), + + ((TUREAD, "r"),), + ((TUWRITE, "w"),), + ((TUEXEC|TSUID, "s"), + (TSUID, "S"), + (TUEXEC, "x")), + + ((TGREAD, "r"),), + ((TGWRITE, "w"),), + ((TGEXEC|TSGID, "s"), + (TSGID, "S"), + (TGEXEC, "x")), + + ((TOREAD, "r"),), + ((TOWRITE, "w"),), + ((TOEXEC|TSVTX, "t"), + (TSVTX, "T"), + (TOEXEC, "x")) +) + +def filemode(mode): + """Convert a file's mode to a string of the form + -rwxrwxrwx. + Used by TarFile.list() + """ + perm = [] + for table in filemode_table: + for bit, char in table: + if mode & bit == bit: + perm.append(char) + break + else: + perm.append("-") + return "".join(perm) + +class TarError(Exception): + """Base exception.""" + pass +class ExtractError(TarError): + """General exception for extract errors.""" + pass +class ReadError(TarError): + """Exception for unreadable tar archives.""" + pass +class CompressionError(TarError): + """Exception for unavailable compression methods.""" + pass +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles.""" + pass +class HeaderError(TarError): + """Base exception for header errors.""" + pass +class EmptyHeaderError(HeaderError): + """Exception for empty headers.""" + pass +class TruncatedHeaderError(HeaderError): + """Exception for truncated headers.""" + pass +class EOFHeaderError(HeaderError): + """Exception for end of file headers.""" + pass +class InvalidHeaderError(HeaderError): + """Exception for invalid headers.""" + pass +class SubsequentHeaderError(HeaderError): + """Exception for missing and invalid extended headers.""" + pass + +#--------------------------- +# internal stream interface +#--------------------------- +class _LowLevelFile(object): + """Low-level file object. Supports reading and writing. + It is used instead of a regular file object for streaming + access. + """ + + def __init__(self, name, mode): + mode = { + "r": os.O_RDONLY, + "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + }[mode] + if hasattr(os, "O_BINARY"): + mode |= os.O_BINARY + self.fd = os.open(name, mode, 0o666) + + def close(self): + os.close(self.fd) + + def read(self, size): + return os.read(self.fd, size) + + def write(self, s): + os.write(self.fd, s) + +class _Stream(object): + """Class that serves as an adapter between TarFile and + a stream-like object. The stream-like object only + needs to have a read() or write() method and is accessed + blockwise. Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin, + sys.stdout, a socket, a tape device etc. 
+ + _Stream is intended to be used only internally. + """ + + def __init__(self, name, mode, comptype, fileobj, bufsize): + """Construct a _Stream object. + """ + self._extfileobj = True + if fileobj is None: + fileobj = _LowLevelFile(name, mode) + self._extfileobj = False + + if comptype == '*': + # Enable transparent compression detection for the + # stream interface + fileobj = _StreamProxy(fileobj) + comptype = fileobj.getcomptype() + + self.name = name or "" + self.mode = mode + self.comptype = comptype + self.fileobj = fileobj + self.bufsize = bufsize + self.buf = b"" + self.pos = 0 + self.closed = False + + try: + if comptype == "gz": + try: + import zlib + except ImportError: + raise CompressionError("zlib module is not available") + self.zlib = zlib + self.crc = zlib.crc32(b"") + if mode == "r": + self._init_read_gz() + else: + self._init_write_gz() + + if comptype == "bz2": + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + if mode == "r": + self.dbuf = b"" + self.cmp = bz2.BZ2Decompressor() + else: + self.cmp = bz2.BZ2Compressor() + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + def __del__(self): + if hasattr(self, "closed") and not self.closed: + self.close() + + def _init_write_gz(self): + """Initialize for writing with gzip compression. + """ + self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED, + -self.zlib.MAX_WBITS, + self.zlib.DEF_MEM_LEVEL, + 0) + timestamp = struct.pack(" self.bufsize: + self.fileobj.write(self.buf[:self.bufsize]) + self.buf = self.buf[self.bufsize:] + + def close(self): + """Close the _Stream object. No operation should be + done on it afterwards. + """ + if self.closed: + return + + if self.mode == "w" and self.comptype != "tar": + self.buf += self.cmp.flush() + + if self.mode == "w" and self.buf: + self.fileobj.write(self.buf) + self.buf = b"" + if self.comptype == "gz": + # The native zlib crc is an unsigned 32-bit integer, but + # the Python wrapper implicitly casts that to a signed C + # long. So, on a 32-bit box self.crc may "look negative", + # while the same crc on a 64-bit box may "look positive". + # To avoid irksome warnings from the `struct` module, force + # it to look positive on all boxes. + self.fileobj.write(struct.pack("= 0: + blocks, remainder = divmod(pos - self.pos, self.bufsize) + for i in range(blocks): + self.read(self.bufsize) + self.read(remainder) + else: + raise StreamError("seeking backwards is not allowed") + return self.pos + + def read(self, size=None): + """Return the next size number of bytes from the stream. + If size is not defined, return all bytes of the stream + up to EOF. + """ + if size is None: + t = [] + while True: + buf = self._read(self.bufsize) + if not buf: + break + t.append(buf) + buf = "".join(t) + else: + buf = self._read(size) + self.pos += len(buf) + return buf + + def _read(self, size): + """Return size bytes from the stream. + """ + if self.comptype == "tar": + return self.__read(size) + + c = len(self.dbuf) + while c < size: + buf = self.__read(self.bufsize) + if not buf: + break + try: + buf = self.cmp.decompress(buf) + except IOError: + raise ReadError("invalid compressed data") + self.dbuf += buf + c += len(buf) + buf = self.dbuf[:size] + self.dbuf = self.dbuf[size:] + return buf + + def __read(self, size): + """Return size bytes from stream. If internal buffer is empty, + read another block from the stream. 
+ """ + c = len(self.buf) + while c < size: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + self.buf += buf + c += len(buf) + buf = self.buf[:size] + self.buf = self.buf[size:] + return buf +# class _Stream + +class _StreamProxy(object): + """Small proxy class that enables transparent compression + detection for the Stream interface (mode 'r|*'). + """ + + def __init__(self, fileobj): + self.fileobj = fileobj + self.buf = self.fileobj.read(BLOCKSIZE) + + def read(self, size): + self.read = self.fileobj.read + return self.buf + + def getcomptype(self): + if self.buf.startswith(b"\037\213\010"): + return "gz" + if self.buf.startswith(b"BZh91"): + return "bz2" + return "tar" + + def close(self): + self.fileobj.close() +# class StreamProxy + +class _BZ2Proxy(object): + """Small proxy class that enables external file object + support for "r:bz2" and "w:bz2" modes. This is actually + a workaround for a limitation in bz2 module's BZ2File + class which (unlike gzip.GzipFile) has no support for + a file object argument. + """ + + blocksize = 16 * 1024 + + def __init__(self, fileobj, mode): + self.fileobj = fileobj + self.mode = mode + self.name = getattr(self.fileobj, "name", None) + self.init() + + def init(self): + import bz2 + self.pos = 0 + if self.mode == "r": + self.bz2obj = bz2.BZ2Decompressor() + self.fileobj.seek(0) + self.buf = b"" + else: + self.bz2obj = bz2.BZ2Compressor() + + def read(self, size): + x = len(self.buf) + while x < size: + raw = self.fileobj.read(self.blocksize) + if not raw: + break + data = self.bz2obj.decompress(raw) + self.buf += data + x += len(data) + + buf = self.buf[:size] + self.buf = self.buf[size:] + self.pos += len(buf) + return buf + + def seek(self, pos): + if pos < self.pos: + self.init() + self.read(pos - self.pos) + + def tell(self): + return self.pos + + def write(self, data): + self.pos += len(data) + raw = self.bz2obj.compress(data) + self.fileobj.write(raw) + + def close(self): + if self.mode == "w": + raw = self.bz2obj.flush() + self.fileobj.write(raw) +# class _BZ2Proxy + +#------------------------ +# Extraction file object +#------------------------ +class _FileInFile(object): + """A thin wrapper around an existing file object that + provides a part of its data as an individual file + object. + """ + + def __init__(self, fileobj, offset, size, blockinfo=None): + self.fileobj = fileobj + self.offset = offset + self.size = size + self.position = 0 + + if blockinfo is None: + blockinfo = [(0, size)] + + # Construct a map with data and zero blocks. + self.map_index = 0 + self.map = [] + lastpos = 0 + realpos = self.offset + for offset, size in blockinfo: + if offset > lastpos: + self.map.append((False, lastpos, offset, None)) + self.map.append((True, offset, offset + size, realpos)) + realpos += size + lastpos = offset + size + if lastpos < self.size: + self.map.append((False, lastpos, self.size, None)) + + def seekable(self): + if not hasattr(self.fileobj, "seekable"): + # XXX gzip.GzipFile and bz2.BZ2File + return True + return self.fileobj.seekable() + + def tell(self): + """Return the current file position. + """ + return self.position + + def seek(self, position): + """Seek to a position in the file. + """ + self.position = position + + def read(self, size=None): + """Read data from the file. 
+ """ + if size is None: + size = self.size - self.position + else: + size = min(size, self.size - self.position) + + buf = b"" + while size > 0: + while True: + data, start, stop, offset = self.map[self.map_index] + if start <= self.position < stop: + break + else: + self.map_index += 1 + if self.map_index == len(self.map): + self.map_index = 0 + length = min(size, stop - self.position) + if data: + self.fileobj.seek(offset + (self.position - start)) + buf += self.fileobj.read(length) + else: + buf += NUL * length + size -= length + self.position += length + return buf +#class _FileInFile + + +class ExFileObject(object): + """File-like object for reading an archive member. + Is returned by TarFile.extractfile(). + """ + blocksize = 1024 + + def __init__(self, tarfile, tarinfo): + self.fileobj = _FileInFile(tarfile.fileobj, + tarinfo.offset_data, + tarinfo.size, + tarinfo.sparse) + self.name = tarinfo.name + self.mode = "r" + self.closed = False + self.size = tarinfo.size + + self.position = 0 + self.buffer = b"" + + def readable(self): + return True + + def writable(self): + return False + + def seekable(self): + return self.fileobj.seekable() + + def read(self, size=None): + """Read at most size bytes from the file. If size is not + present or None, read all data until EOF is reached. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + buf = b"" + if self.buffer: + if size is None: + buf = self.buffer + self.buffer = b"" + else: + buf = self.buffer[:size] + self.buffer = self.buffer[size:] + + if size is None: + buf += self.fileobj.read() + else: + buf += self.fileobj.read(size - len(buf)) + + self.position += len(buf) + return buf + + # XXX TextIOWrapper uses the read1() method. + read1 = read + + def readline(self, size=-1): + """Read one entire line from the file. If size is present + and non-negative, return a string with at most that + size, which may be an incomplete line. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + while True: + buf = self.fileobj.read(self.blocksize) + self.buffer += buf + if not buf or b"\n" in buf: + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + pos = len(self.buffer) + break + + if size != -1: + pos = min(size, pos) + + buf = self.buffer[:pos] + self.buffer = self.buffer[pos:] + self.position += len(buf) + return buf + + def readlines(self): + """Return a list with all remaining lines. + """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def tell(self): + """Return the current file position. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + return self.position + + def seek(self, pos, whence=os.SEEK_SET): + """Seek to a position in the file. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + if whence == os.SEEK_SET: + self.position = min(max(pos, 0), self.size) + elif whence == os.SEEK_CUR: + if pos < 0: + self.position = max(self.position + pos, 0) + else: + self.position = min(self.position + pos, self.size) + elif whence == os.SEEK_END: + self.position = max(min(self.size + pos, self.size), 0) + else: + raise ValueError("Invalid argument") + + self.buffer = b"" + self.fileobj.seek(self.position) + + def close(self): + """Close the file object. + """ + self.closed = True + + def __iter__(self): + """Get an iterator over the file's lines. 
+ """ + while True: + line = self.readline() + if not line: + break + yield line +#class ExFileObject + +#------------------ +# Exported Classes +#------------------ +class TarInfo(object): + """Informational class which holds the details about an + archive member given by a tar header block. + TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + + __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", + "chksum", "type", "linkname", "uname", "gname", + "devmajor", "devminor", + "offset", "offset_data", "pax_headers", "sparse", + "tarfile", "_sparse_structs", "_link_target") + + def __init__(self, name=""): + """Construct a TarInfo object. name is the optional name + of the member. + """ + self.name = name # member name + self.mode = 0o644 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "" # user name + self.gname = "" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here + + self.sparse = None # sparse member information + self.pax_headers = {} # pax header information + + # In pax headers the "name" and "linkname" field are called + # "path" and "linkpath". + def _getpath(self): + return self.name + def _setpath(self, name): + self.name = name + path = property(_getpath, _setpath) + + def _getlinkpath(self): + return self.linkname + def _setlinkpath(self, linkname): + self.linkname = linkname + linkpath = property(_getlinkpath, _setlinkpath) + + def __repr__(self): + return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + + def get_info(self): + """Return the TarInfo's attributes as a dictionary. + """ + info = { + "name": self.name, + "mode": self.mode & 0o7777, + "uid": self.uid, + "gid": self.gid, + "size": self.size, + "mtime": self.mtime, + "chksum": self.chksum, + "type": self.type, + "linkname": self.linkname, + "uname": self.uname, + "gname": self.gname, + "devmajor": self.devmajor, + "devminor": self.devminor + } + + if info["type"] == DIRTYPE and not info["name"].endswith("/"): + info["name"] += "/" + + return info + + def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): + """Return a tar header as a string of 512 byte blocks. + """ + info = self.get_info() + + if format == USTAR_FORMAT: + return self.create_ustar_header(info, encoding, errors) + elif format == GNU_FORMAT: + return self.create_gnu_header(info, encoding, errors) + elif format == PAX_FORMAT: + return self.create_pax_header(info, encoding) + else: + raise ValueError("invalid format") + + def create_ustar_header(self, info, encoding, errors): + """Return the object as a ustar header block. + """ + info["magic"] = POSIX_MAGIC + + if len(info["linkname"]) > LENGTH_LINK: + raise ValueError("linkname is too long") + + if len(info["name"]) > LENGTH_NAME: + info["prefix"], info["name"] = self._posix_split_name(info["name"]) + + return self._create_header(info, USTAR_FORMAT, encoding, errors) + + def create_gnu_header(self, info, encoding, errors): + """Return the object as a GNU header block sequence. 
+ """ + info["magic"] = GNU_MAGIC + + buf = b"" + if len(info["linkname"]) > LENGTH_LINK: + buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) + + if len(info["name"]) > LENGTH_NAME: + buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) + + return buf + self._create_header(info, GNU_FORMAT, encoding, errors) + + def create_pax_header(self, info, encoding): + """Return the object as a ustar header block. If it cannot be + represented this way, prepend a pax extended header sequence + with supplement information. + """ + info["magic"] = POSIX_MAGIC + pax_headers = self.pax_headers.copy() + + # Test string fields for values that exceed the field length or cannot + # be represented in ASCII encoding. + for name, hname, length in ( + ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), + ("uname", "uname", 32), ("gname", "gname", 32)): + + if hname in pax_headers: + # The pax header has priority. + continue + + # Try to encode the string as ASCII. + try: + info[name].encode("ascii", "strict") + except UnicodeEncodeError: + pax_headers[hname] = info[name] + continue + + if len(info[name]) > length: + pax_headers[hname] = info[name] + + # Test number fields for values that exceed the field limit or values + # that like to be stored as float. + for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): + if name in pax_headers: + # The pax header has priority. Avoid overflow. + info[name] = 0 + continue + + val = info[name] + if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): + pax_headers[name] = str(val) + info[name] = 0 + + # Create a pax extended header if necessary. + if pax_headers: + buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) + else: + buf = b"" + + return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") + + @classmethod + def create_pax_global_header(cls, pax_headers): + """Return the object as a pax global header block sequence. + """ + return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") + + def _posix_split_name(self, name): + """Split a name longer than 100 chars into a prefix + and a name part. + """ + prefix = name[:LENGTH_PREFIX + 1] + while prefix and prefix[-1] != "/": + prefix = prefix[:-1] + + name = name[len(prefix):] + prefix = prefix[:-1] + + if not prefix or len(name) > LENGTH_NAME: + raise ValueError("name is too long") + return prefix, name + + @staticmethod + def _create_header(info, format, encoding, errors): + """Return a header block. info is a dictionary with file + information, format must be one of the *_FORMAT constants. 
+ """ + parts = [ + stn(info.get("name", ""), 100, encoding, errors), + itn(info.get("mode", 0) & 0o7777, 8, format), + itn(info.get("uid", 0), 8, format), + itn(info.get("gid", 0), 8, format), + itn(info.get("size", 0), 12, format), + itn(info.get("mtime", 0), 12, format), + b" ", # checksum field + info.get("type", REGTYPE), + stn(info.get("linkname", ""), 100, encoding, errors), + info.get("magic", POSIX_MAGIC), + stn(info.get("uname", ""), 32, encoding, errors), + stn(info.get("gname", ""), 32, encoding, errors), + itn(info.get("devmajor", 0), 8, format), + itn(info.get("devminor", 0), 8, format), + stn(info.get("prefix", ""), 155, encoding, errors) + ] + + buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) + chksum = calc_chksums(buf[-BLOCKSIZE:])[0] + buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] + return buf + + @staticmethod + def _create_payload(payload): + """Return the string payload filled with zero bytes + up to the next 512 byte border. + """ + blocks, remainder = divmod(len(payload), BLOCKSIZE) + if remainder > 0: + payload += (BLOCKSIZE - remainder) * NUL + return payload + + @classmethod + def _create_gnu_long_header(cls, name, type, encoding, errors): + """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence + for name. + """ + name = name.encode(encoding, errors) + NUL + + info = {} + info["name"] = "././@LongLink" + info["type"] = type + info["size"] = len(name) + info["magic"] = GNU_MAGIC + + # create extended header + name blocks. + return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ + cls._create_payload(name) + + @classmethod + def _create_pax_generic_header(cls, pax_headers, type, encoding): + """Return a POSIX.1-2008 extended or global header sequence + that contains a list of keyword, value pairs. The values + must be strings. + """ + # Check if one of the fields contains surrogate characters and thereby + # forces hdrcharset=BINARY, see _proc_pax() for more information. + binary = False + for keyword, value in pax_headers.items(): + try: + value.encode("utf8", "strict") + except UnicodeEncodeError: + binary = True + break + + records = b"" + if binary: + # Put the hdrcharset field at the beginning of the header. + records += b"21 hdrcharset=BINARY\n" + + for keyword, value in pax_headers.items(): + keyword = keyword.encode("utf8") + if binary: + # Try to restore the original byte representation of `value'. + # Needless to say, that the encoding must match the string. + value = value.encode(encoding, "surrogateescape") + else: + value = value.encode("utf8") + + l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' + n = p = 0 + while True: + n = l + len(str(p)) + if n == p: + break + p = n + records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" + + # We use a hardcoded "././@PaxHeader" name like star does + # instead of the one that POSIX recommends. + info = {} + info["name"] = "././@PaxHeader" + info["type"] = type + info["size"] = len(records) + info["magic"] = POSIX_MAGIC + + # Create pax header + record blocks. + return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ + cls._create_payload(records) + + @classmethod + def frombuf(cls, buf, encoding, errors): + """Construct a TarInfo object from a 512 byte bytes object. 
+ """ + if len(buf) == 0: + raise EmptyHeaderError("empty header") + if len(buf) != BLOCKSIZE: + raise TruncatedHeaderError("truncated header") + if buf.count(NUL) == BLOCKSIZE: + raise EOFHeaderError("end of file header") + + chksum = nti(buf[148:156]) + if chksum not in calc_chksums(buf): + raise InvalidHeaderError("bad checksum") + + obj = cls() + obj.name = nts(buf[0:100], encoding, errors) + obj.mode = nti(buf[100:108]) + obj.uid = nti(buf[108:116]) + obj.gid = nti(buf[116:124]) + obj.size = nti(buf[124:136]) + obj.mtime = nti(buf[136:148]) + obj.chksum = chksum + obj.type = buf[156:157] + obj.linkname = nts(buf[157:257], encoding, errors) + obj.uname = nts(buf[265:297], encoding, errors) + obj.gname = nts(buf[297:329], encoding, errors) + obj.devmajor = nti(buf[329:337]) + obj.devminor = nti(buf[337:345]) + prefix = nts(buf[345:500], encoding, errors) + + # Old V7 tar format represents a directory as a regular + # file with a trailing slash. + if obj.type == AREGTYPE and obj.name.endswith("/"): + obj.type = DIRTYPE + + # The old GNU sparse format occupies some of the unused + # space in the buffer for up to 4 sparse structures. + # Save the them for later processing in _proc_sparse(). + if obj.type == GNUTYPE_SPARSE: + pos = 386 + structs = [] + for i in range(4): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[482]) + origsize = nti(buf[483:495]) + obj._sparse_structs = (structs, isextended, origsize) + + # Remove redundant slashes from directories. + if obj.isdir(): + obj.name = obj.name.rstrip("/") + + # Reconstruct a ustar longname. + if prefix and obj.type not in GNU_TYPES: + obj.name = prefix + "/" + obj.name + return obj + + @classmethod + def fromtarfile(cls, tarfile): + """Return the next TarInfo object from TarFile object + tarfile. + """ + buf = tarfile.fileobj.read(BLOCKSIZE) + obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) + obj.offset = tarfile.fileobj.tell() - BLOCKSIZE + return obj._proc_member(tarfile) + + #-------------------------------------------------------------------------- + # The following are methods that are called depending on the type of a + # member. The entry point is _proc_member() which can be overridden in a + # subclass to add custom _proc_*() methods. A _proc_*() method MUST + # implement the following + # operations: + # 1. Set self.offset_data to the position where the data blocks begin, + # if there is data that follows. + # 2. Set tarfile.offset to the position where the next member's header will + # begin. + # 3. Return self or another valid TarInfo object. + def _proc_member(self, tarfile): + """Choose the right processing method depending on + the type and call it. + """ + if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): + return self._proc_gnulong(tarfile) + elif self.type == GNUTYPE_SPARSE: + return self._proc_sparse(tarfile) + elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): + return self._proc_pax(tarfile) + else: + return self._proc_builtin(tarfile) + + def _proc_builtin(self, tarfile): + """Process a builtin type or an unknown type which + will be treated as a regular file. + """ + self.offset_data = tarfile.fileobj.tell() + offset = self.offset_data + if self.isreg() or self.type not in SUPPORTED_TYPES: + # Skip the following data blocks. + offset += self._block(self.size) + tarfile.offset = offset + + # Patch the TarInfo object with saved global + # header information. 
+ self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) + + return self + + def _proc_gnulong(self, tarfile): + """Process the blocks that hold a GNU longname + or longlink member. + """ + buf = tarfile.fileobj.read(self._block(self.size)) + + # Fetch the next header and process it. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Patch the TarInfo object from the next header with + # the longname information. + next.offset = self.offset + if self.type == GNUTYPE_LONGNAME: + next.name = nts(buf, tarfile.encoding, tarfile.errors) + elif self.type == GNUTYPE_LONGLINK: + next.linkname = nts(buf, tarfile.encoding, tarfile.errors) + + return next + + def _proc_sparse(self, tarfile): + """Process a GNU sparse header plus extra headers. + """ + # We already collected some sparse structures in frombuf(). + structs, isextended, origsize = self._sparse_structs + del self._sparse_structs + + # Collect sparse structures from extended header blocks. + while isextended: + buf = tarfile.fileobj.read(BLOCKSIZE) + pos = 0 + for i in range(21): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset and numbytes: + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[504]) + self.sparse = structs + + self.offset_data = tarfile.fileobj.tell() + tarfile.offset = self.offset_data + self._block(self.size) + self.size = origsize + return self + + def _proc_pax(self, tarfile): + """Process an extended or global header as described in + POSIX.1-2008. + """ + # Read the header information. + buf = tarfile.fileobj.read(self._block(self.size)) + + # A pax header stores supplemental information for either + # the following file (extended) or all following files + # (global). + if self.type == XGLTYPE: + pax_headers = tarfile.pax_headers + else: + pax_headers = tarfile.pax_headers.copy() + + # Check if the pax header contains a hdrcharset field. This tells us + # the encoding of the path, linkpath, uname and gname fields. Normally, + # these fields are UTF-8 encoded but since POSIX.1-2008 tar + # implementations are allowed to store them as raw binary strings if + # the translation to UTF-8 fails. + match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) + if match is not None: + pax_headers["hdrcharset"] = match.group(1).decode("utf8") + + # For the time being, we don't care about anything other than "BINARY". + # The only other value that is currently allowed by the standard is + # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. + hdrcharset = pax_headers.get("hdrcharset") + if hdrcharset == "BINARY": + encoding = tarfile.encoding + else: + encoding = "utf8" + + # Parse pax header information. A record looks like that: + # "%d %s=%s\n" % (length, keyword, value). length is the size + # of the complete record including the length field itself and + # the newline. keyword and value are both UTF-8 encoded strings. + regex = re.compile(br"(\d+) ([^=]+)=") + pos = 0 + while True: + match = regex.match(buf, pos) + if not match: + break + + length, keyword = match.groups() + length = int(length) + value = buf[match.end(2) + 1:match.start(1) + length - 1] + + # Normally, we could just use "utf8" as the encoding and "strict" + # as the error handler, but we better not take the risk. 
For + # example, GNU tar <= 1.23 is known to store filenames it cannot + # translate to UTF-8 as raw strings (unfortunately without a + # hdrcharset=BINARY header). + # We first try the strict standard encoding, and if that fails we + # fall back on the user's encoding and error handler. + keyword = self._decode_pax_field(keyword, "utf8", "utf8", + tarfile.errors) + if keyword in PAX_NAME_FIELDS: + value = self._decode_pax_field(value, encoding, tarfile.encoding, + tarfile.errors) + else: + value = self._decode_pax_field(value, "utf8", "utf8", + tarfile.errors) + + pax_headers[keyword] = value + pos += length + + # Fetch the next header. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Process GNU sparse information. + if "GNU.sparse.map" in pax_headers: + # GNU extended sparse format version 0.1. + self._proc_gnusparse_01(next, pax_headers) + + elif "GNU.sparse.size" in pax_headers: + # GNU extended sparse format version 0.0. + self._proc_gnusparse_00(next, pax_headers, buf) + + elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": + # GNU extended sparse format version 1.0. + self._proc_gnusparse_10(next, pax_headers, tarfile) + + if self.type in (XHDTYPE, SOLARIS_XHDTYPE): + # Patch the TarInfo object with the extended header info. + next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) + next.offset = self.offset + + if "size" in pax_headers: + # If the extended header replaces the size field, + # we need to recalculate the offset where the next + # header starts. + offset = next.offset_data + if next.isreg() or next.type not in SUPPORTED_TYPES: + offset += next._block(next.size) + tarfile.offset = offset + + return next + + def _proc_gnusparse_00(self, next, pax_headers, buf): + """Process a GNU tar extended sparse header, version 0.0. + """ + offsets = [] + for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): + offsets.append(int(match.group(1))) + numbytes = [] + for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): + numbytes.append(int(match.group(1))) + next.sparse = list(zip(offsets, numbytes)) + + def _proc_gnusparse_01(self, next, pax_headers): + """Process a GNU tar extended sparse header, version 0.1. + """ + sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _proc_gnusparse_10(self, next, pax_headers, tarfile): + """Process a GNU tar extended sparse header, version 1.0. + """ + fields = None + sparse = [] + buf = tarfile.fileobj.read(BLOCKSIZE) + fields, buf = buf.split(b"\n", 1) + fields = int(fields) + while len(sparse) < fields * 2: + if b"\n" not in buf: + buf += tarfile.fileobj.read(BLOCKSIZE) + number, buf = buf.split(b"\n", 1) + sparse.append(int(number)) + next.offset_data = tarfile.fileobj.tell() + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _apply_pax_info(self, pax_headers, encoding, errors): + """Replace fields with supplemental information from a previous + pax extended or global header. 
+ """ + for keyword, value in pax_headers.items(): + if keyword == "GNU.sparse.name": + setattr(self, "path", value) + elif keyword == "GNU.sparse.size": + setattr(self, "size", int(value)) + elif keyword == "GNU.sparse.realsize": + setattr(self, "size", int(value)) + elif keyword in PAX_FIELDS: + if keyword in PAX_NUMBER_FIELDS: + try: + value = PAX_NUMBER_FIELDS[keyword](value) + except ValueError: + value = 0 + if keyword == "path": + value = value.rstrip("/") + setattr(self, keyword, value) + + self.pax_headers = pax_headers.copy() + + def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): + """Decode a single field from a pax record. + """ + try: + return value.decode(encoding, "strict") + except UnicodeDecodeError: + return value.decode(fallback_encoding, fallback_errors) + + def _block(self, count): + """Round up a byte count by BLOCKSIZE and return it, + e.g. _block(834) => 1024. + """ + blocks, remainder = divmod(count, BLOCKSIZE) + if remainder: + blocks += 1 + return blocks * BLOCKSIZE + + def isreg(self): + return self.type in REGULAR_TYPES + def isfile(self): + return self.isreg() + def isdir(self): + return self.type == DIRTYPE + def issym(self): + return self.type == SYMTYPE + def islnk(self): + return self.type == LNKTYPE + def ischr(self): + return self.type == CHRTYPE + def isblk(self): + return self.type == BLKTYPE + def isfifo(self): + return self.type == FIFOTYPE + def issparse(self): + return self.sparse is not None + def isdev(self): + return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) +# class TarInfo + +class TarFile(object): + """The TarFile Class provides an interface to tar archives. + """ + + debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) + + dereference = False # If true, add content of linked file to the + # tar file, else the link. + + ignore_zeros = False # If true, skips empty or invalid blocks and + # continues processing. + + errorlevel = 1 # If 0, fatal errors only appear in debug + # messages (if debug >= 0). If > 0, errors + # are passed to the caller as exceptions. + + format = DEFAULT_FORMAT # The format to use when creating an archive. + + encoding = ENCODING # Encoding for 8-bit character strings. + + errors = None # Error handler for unicode conversion. + + tarinfo = TarInfo # The default TarInfo class to use. + + fileobject = ExFileObject # The default ExFileObject class to use. + + def __init__(self, name=None, mode="r", fileobj=None, format=None, + tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, + errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): + """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + self.mode = mode + self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] + + if not fileobj: + if self.mode == "a" and not os.path.exists(name): + # Create nonexistent files in append mode. 
+ self.mode = "w" + self._mode = "wb" + fileobj = bltn_open(name, self._mode) + self._extfileobj = False + else: + if name is None and hasattr(fileobj, "name"): + name = fileobj.name + if hasattr(fileobj, "mode"): + self._mode = fileobj.mode + self._extfileobj = True + self.name = os.path.abspath(name) if name else None + self.fileobj = fileobj + + # Init attributes. + if format is not None: + self.format = format + if tarinfo is not None: + self.tarinfo = tarinfo + if dereference is not None: + self.dereference = dereference + if ignore_zeros is not None: + self.ignore_zeros = ignore_zeros + if encoding is not None: + self.encoding = encoding + self.errors = errors + + if pax_headers is not None and self.format == PAX_FORMAT: + self.pax_headers = pax_headers + else: + self.pax_headers = {} + + if debug is not None: + self.debug = debug + if errorlevel is not None: + self.errorlevel = errorlevel + + # Init datastructures. + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = self.fileobj.tell() + # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added + + try: + if self.mode == "r": + self.firstmember = None + self.firstmember = self.next() + + if self.mode == "a": + # Move to the end of the archive, + # before the first empty block. + while True: + self.fileobj.seek(self.offset) + try: + tarinfo = self.tarinfo.fromtarfile(self) + self.members.append(tarinfo) + except EOFHeaderError: + self.fileobj.seek(self.offset) + break + except HeaderError as e: + raise ReadError(str(e)) + + if self.mode in "aw": + self._loaded = True + + if self.pax_headers: + buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) + self.fileobj.write(buf) + self.offset += len(buf) + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + #-------------------------------------------------------------------------- + # Below are the classmethods which act as alternate constructors to the + # TarFile class. The open() method is the only one that is needed for + # public use; it is the "super"-constructor and is able to select an + # adequate "sub"-constructor for a particular compression using the mapping + # from OPEN_METH. + # + # This concept allows one to subclass TarFile without losing the comfort of + # the super-constructor. A sub-constructor is registered and made available + # by adding it to the mapping in OPEN_METH. + + @classmethod + def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): + """Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. 
+ + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + """ + + if not name and not fileobj: + raise ValueError("nothing to open") + + if mode in ("r", "r:*"): + # Find out which *open() is appropriate for opening the file. + for comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + if fileobj is not None: + saved_pos = fileobj.tell() + try: + return func(name, "r", fileobj, **kwargs) + except (ReadError, CompressionError) as e: + if fileobj is not None: + fileobj.seek(saved_pos) + continue + raise ReadError("file could not be opened successfully") + + elif ":" in mode: + filemode, comptype = mode.split(":", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + # Select the *open() function according to + # given compression. + if comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + else: + raise CompressionError("unknown compression type %r" % comptype) + return func(name, filemode, fileobj, **kwargs) + + elif "|" in mode: + filemode, comptype = mode.split("|", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + if filemode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + stream = _Stream(name, filemode, comptype, fileobj, bufsize) + try: + t = cls(name, filemode, stream, **kwargs) + except: + stream.close() + raise + t._extfileobj = False + return t + + elif mode in "aw": + return cls.taropen(name, mode, fileobj, **kwargs) + + raise ValueError("undiscernible mode") + + @classmethod + def taropen(cls, name, mode="r", fileobj=None, **kwargs): + """Open uncompressed tar archive name for reading or writing. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + return cls(name, mode, fileobj, **kwargs) + + @classmethod + def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open gzip compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + try: + import gzip + gzip.GzipFile + except (ImportError, AttributeError): + raise CompressionError("gzip module is not available") + + extfileobj = fileobj is not None + try: + fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) + t = cls.taropen(name, mode, fileobj, **kwargs) + except IOError: + if not extfileobj and fileobj is not None: + fileobj.close() + if fileobj is None: + raise + raise ReadError("not a gzip file") + except: + if not extfileobj and fileobj is not None: + fileobj.close() + raise + t._extfileobj = extfileobj + return t + + @classmethod + def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open bzip2 compressed tar archive name for reading or writing. 
+ Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'.") + + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + + if fileobj is not None: + fileobj = _BZ2Proxy(fileobj, mode) + else: + fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except (IOError, EOFError): + fileobj.close() + raise ReadError("not a bzip2 file") + t._extfileobj = False + return t + + # All *open() methods are registered here. + OPEN_METH = { + "tar": "taropen", # uncompressed tar + "gz": "gzopen", # gzip compressed tar + "bz2": "bz2open" # bzip2 compressed tar + } + + #-------------------------------------------------------------------------- + # The public methods which TarFile provides: + + def close(self): + """Close the TarFile. In write-mode, two finishing zero blocks are + appended to the archive. + """ + if self.closed: + return + + if self.mode in "aw": + self.fileobj.write(NUL * (BLOCKSIZE * 2)) + self.offset += (BLOCKSIZE * 2) + # fill up the end with zero-blocks + # (like option -b20 for tar does) + blocks, remainder = divmod(self.offset, RECORDSIZE) + if remainder > 0: + self.fileobj.write(NUL * (RECORDSIZE - remainder)) + + if not self._extfileobj: + self.fileobj.close() + self.closed = True + + def getmember(self, name): + """Return a TarInfo object for member `name'. If `name' can not be + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurrence is assumed to be the + most up-to-date version. + """ + tarinfo = self._getmember(name) + if tarinfo is None: + raise KeyError("filename %r not found" % name) + return tarinfo + + def getmembers(self): + """Return the members of the archive as a list of TarInfo objects. The + list has the same order as the members in the archive. + """ + self._check() + if not self._loaded: # if we want to obtain a list of + self._load() # all members, we first have to + # scan the whole archive. + return self.members + + def getnames(self): + """Return the members of the archive as a list of their names. It has + the same order as the list returned by getmembers(). + """ + return [tarinfo.name for tarinfo in self.getmembers()] + + def gettarinfo(self, name=None, arcname=None, fileobj=None): + """Create a TarInfo object for either the file `name' or the file + object `fileobj' (using os.fstat on its file descriptor). You can + modify some of the TarInfo's attributes before you add it using + addfile(). If given, `arcname' specifies an alternative name for the + file in the archive. + """ + self._check("aw") + + # When fileobj is given, replace name by + # fileobj's real name. + if fileobj is not None: + name = fileobj.name + + # Building the name of the member in the archive. + # Backward slashes are converted to forward slashes, + # Absolute paths are turned to relative paths. + if arcname is None: + arcname = name + drv, arcname = os.path.splitdrive(arcname) + arcname = arcname.replace(os.sep, "/") + arcname = arcname.lstrip("/") + + # Now, fill the TarInfo object with + # information specific for the file. + tarinfo = self.tarinfo() + tarinfo.tarfile = self + + # Use os.stat or os.lstat, depending on platform + # and if symlinks shall be resolved. 
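+ # What follows: stat the file (lstat when `dereference' is false, so a
+ # symlink is archived as a link rather than as its target), map st_mode to
+ # a tar member type, and cache (st_ino, st_dev) so that a second name for
+ # the same regular file can later be stored as a LNKTYPE hard-link member.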
+ if fileobj is None: + if hasattr(os, "lstat") and not self.dereference: + statres = os.lstat(name) + else: + statres = os.stat(name) + else: + statres = os.fstat(fileobj.fileno()) + linkname = "" + + stmd = statres.st_mode + if stat.S_ISREG(stmd): + inode = (statres.st_ino, statres.st_dev) + if not self.dereference and statres.st_nlink > 1 and \ + inode in self.inodes and arcname != self.inodes[inode]: + # Is it a hardlink to an already + # archived file? + type = LNKTYPE + linkname = self.inodes[inode] + else: + # The inode is added only if its valid. + # For win32 it is always 0. + type = REGTYPE + if inode[0]: + self.inodes[inode] = arcname + elif stat.S_ISDIR(stmd): + type = DIRTYPE + elif stat.S_ISFIFO(stmd): + type = FIFOTYPE + elif stat.S_ISLNK(stmd): + type = SYMTYPE + linkname = os.readlink(name) + elif stat.S_ISCHR(stmd): + type = CHRTYPE + elif stat.S_ISBLK(stmd): + type = BLKTYPE + else: + return None + + # Fill the TarInfo object with all + # information we can get. + tarinfo.name = arcname + tarinfo.mode = stmd + tarinfo.uid = statres.st_uid + tarinfo.gid = statres.st_gid + if type == REGTYPE: + tarinfo.size = statres.st_size + else: + tarinfo.size = 0 + tarinfo.mtime = statres.st_mtime + tarinfo.type = type + tarinfo.linkname = linkname + if pwd: + try: + tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] + except KeyError: + pass + if grp: + try: + tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] + except KeyError: + pass + + if type in (CHRTYPE, BLKTYPE): + if hasattr(os, "major") and hasattr(os, "minor"): + tarinfo.devmajor = os.major(statres.st_rdev) + tarinfo.devminor = os.minor(statres.st_rdev) + return tarinfo + + def list(self, verbose=True): + """Print a table of contents to sys.stdout. If `verbose' is False, only + the names of the members are printed. If it is True, an `ls -l'-like + output is produced. + """ + self._check() + + for tarinfo in self: + if verbose: + print(filemode(tarinfo.mode), end=' ') + print("%s/%s" % (tarinfo.uname or tarinfo.uid, + tarinfo.gname or tarinfo.gid), end=' ') + if tarinfo.ischr() or tarinfo.isblk(): + print("%10s" % ("%d,%d" \ + % (tarinfo.devmajor, tarinfo.devminor)), end=' ') + else: + print("%10d" % tarinfo.size, end=' ') + print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6], end=' ') + + print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') + + if verbose: + if tarinfo.issym(): + print("->", tarinfo.linkname, end=' ') + if tarinfo.islnk(): + print("link to", tarinfo.linkname, end=' ') + print() + + def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): + """Add the file `name' to the archive. `name' may be any type of file + (directory, fifo, symbolic link, etc.). If given, `arcname' + specifies an alternative name for the file in the archive. + Directories are added recursively by default. This can be avoided by + setting `recursive' to False. `exclude' is a function that should + return True for each filename to be excluded. `filter' is a function + that expects a TarInfo object argument and returns the changed + TarInfo object, if it returns None the TarInfo object will be + excluded from the archive. + """ + self._check("aw") + + if arcname is None: + arcname = name + + # Exclude pathnames. + if exclude is not None: + import warnings + warnings.warn("use the filter argument instead", + DeprecationWarning, 2) + if exclude(name): + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Skip if somebody tries to archive the archive... 
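+ # An illustrative, hedged sketch of the `filter' hook documented in add()'s
+ # docstring above; the helper, archive name and paths are hypothetical:
+ #
+ #     def reset_owner(ti):
+ #         ti.uid = ti.gid = 0
+ #         ti.uname = ti.gname = "root"
+ #         return ti
+ #
+ #     with TarFile.open("dist.tar.gz", "w:gz") as tf:
+ #         tf.add("build/", arcname="pkg", filter=reset_owner)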
+ if self.name is not None and os.path.abspath(name) == self.name: + self._dbg(2, "tarfile: Skipped %r" % name) + return + + self._dbg(1, name) + + # Create a TarInfo object from the file. + tarinfo = self.gettarinfo(name, arcname) + + if tarinfo is None: + self._dbg(1, "tarfile: Unsupported type %r" % name) + return + + # Change or exclude the TarInfo object. + if filter is not None: + tarinfo = filter(tarinfo) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Append the tar header and data to the archive. + if tarinfo.isreg(): + f = bltn_open(name, "rb") + self.addfile(tarinfo, f) + f.close() + + elif tarinfo.isdir(): + self.addfile(tarinfo) + if recursive: + for f in os.listdir(name): + self.add(os.path.join(name, f), os.path.join(arcname, f), + recursive, exclude, filter=filter) + + else: + self.addfile(tarinfo) + + def addfile(self, tarinfo, fileobj=None): + """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is + given, tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects using gettarinfo(). + On Windows platforms, `fileobj' should always be opened with mode + 'rb' to avoid irritation about the file size. + """ + self._check("aw") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(self.format, self.encoding, self.errors) + self.fileobj.write(buf) + self.offset += len(buf) + + # If there's data to follow, append it. + if fileobj is not None: + copyfileobj(fileobj, self.fileobj, tarinfo.size) + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + self.fileobj.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + self.offset += blocks * BLOCKSIZE + + self.members.append(tarinfo) + + def extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 0o700 + # Do not set_attrs directories, as we will do that further down + self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) + + # Reverse sort directories. + directories.sort(key=lambda a: a.name) + directories.reverse() + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extract(self, member, path="", set_attrs=True): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a TarInfo object. You can + specify a different directory using `path'. File attributes (owner, + mtime, mode) are set unless `set_attrs' is False. + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + # Prepare the link target for makelink(). 
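+ # For a hard-link member, linkname is the archive-internal path of the file
+ # it points at; joining it with `path' gives makelink() the location where
+ # that file was (or will be) extracted, so os.link() can target it there.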
+ if tarinfo.islnk(): + tarinfo._link_target = os.path.join(path, tarinfo.linkname) + + try: + self._extract_member(tarinfo, os.path.join(path, tarinfo.name), + set_attrs=set_attrs) + except EnvironmentError as e: + if self.errorlevel > 0: + raise + else: + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extractfile(self, member): + """Extract a member from the archive as a file object. `member' may be + a filename or a TarInfo object. If `member' is a regular file, a + file-like object is returned. If `member' is a link, a file-like + object is constructed from the link's target. If `member' is none of + the above, None is returned. + The file-like object is read-only and provides the following + methods: read(), readline(), readlines(), seek() and tell() + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + if tarinfo.isreg(): + return self.fileobject(self, tarinfo) + + elif tarinfo.type not in SUPPORTED_TYPES: + # If a member's type is unknown, it is treated as a + # regular file. + return self.fileobject(self, tarinfo) + + elif tarinfo.islnk() or tarinfo.issym(): + if isinstance(self.fileobj, _Stream): + # A small but ugly workaround for the case that someone tries + # to extract a (sym)link as a file-object from a non-seekable + # stream of tar blocks. + raise StreamError("cannot extract (sym)link as file object") + else: + # A (sym)link's file object is its target's file object. + return self.extractfile(self._find_link_target(tarinfo)) + else: + # If there's no data associated with the member (directory, chrdev, + # blkdev, etc.), return None instead of a file object. + return None + + def _extract_member(self, tarinfo, targetpath, set_attrs=True): + """Extract the TarInfo object tarinfo to a physical + file called targetpath. + """ + # Fetch the TarInfo object for the given name + # and build the destination pathname, replacing + # forward slashes to platform specific separators. + targetpath = targetpath.rstrip("/") + targetpath = targetpath.replace("/", os.sep) + + # Create all upper directories. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + # Create directories that are not part of the archive with + # default permissions. + os.makedirs(upperdirs) + + if tarinfo.islnk() or tarinfo.issym(): + self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) + else: + self._dbg(1, tarinfo.name) + + if tarinfo.isreg(): + self.makefile(tarinfo, targetpath) + elif tarinfo.isdir(): + self.makedir(tarinfo, targetpath) + elif tarinfo.isfifo(): + self.makefifo(tarinfo, targetpath) + elif tarinfo.ischr() or tarinfo.isblk(): + self.makedev(tarinfo, targetpath) + elif tarinfo.islnk() or tarinfo.issym(): + self.makelink(tarinfo, targetpath) + elif tarinfo.type not in SUPPORTED_TYPES: + self.makeunknown(tarinfo, targetpath) + else: + self.makefile(tarinfo, targetpath) + + if set_attrs: + self.chown(tarinfo, targetpath) + if not tarinfo.issym(): + self.chmod(tarinfo, targetpath) + self.utime(tarinfo, targetpath) + + #-------------------------------------------------------------------------- + # Below are the different file methods. They are called via + # _extract_member() when extract() is called. They can be replaced in a + # subclass to implement other functionality. 
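+ # A hedged sketch of that subclassing hook (the class name and the print
+ # call are illustrative, not part of this module):
+ #
+ #     class VerboseTarFile(TarFile):
+ #         def makefile(self, tarinfo, targetpath):
+ #             print("writing %s (%d bytes)" % (targetpath, tarinfo.size))
+ #             TarFile.makefile(self, tarinfo, targetpath)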
+ + def makedir(self, tarinfo, targetpath): + """Make a directory called targetpath. + """ + try: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) + except EnvironmentError as e: + if e.errno != errno.EEXIST: + raise + + def makefile(self, tarinfo, targetpath): + """Make a file called targetpath. + """ + source = self.fileobj + source.seek(tarinfo.offset_data) + target = bltn_open(targetpath, "wb") + if tarinfo.sparse is not None: + for offset, size in tarinfo.sparse: + target.seek(offset) + copyfileobj(source, target, size) + else: + copyfileobj(source, target, tarinfo.size) + target.seek(tarinfo.size) + target.truncate() + target.close() + + def makeunknown(self, tarinfo, targetpath): + """Make a file from a TarInfo object with an unknown type + at targetpath. + """ + self.makefile(tarinfo, targetpath) + self._dbg(1, "tarfile: Unknown file type %r, " \ + "extracted as regular file." % tarinfo.type) + + def makefifo(self, tarinfo, targetpath): + """Make a fifo called targetpath. + """ + if hasattr(os, "mkfifo"): + os.mkfifo(targetpath) + else: + raise ExtractError("fifo not supported by system") + + def makedev(self, tarinfo, targetpath): + """Make a character or block device called targetpath. + """ + if not hasattr(os, "mknod") or not hasattr(os, "makedev"): + raise ExtractError("special devices not supported by system") + + mode = tarinfo.mode + if tarinfo.isblk(): + mode |= stat.S_IFBLK + else: + mode |= stat.S_IFCHR + + os.mknod(targetpath, mode, + os.makedev(tarinfo.devmajor, tarinfo.devminor)) + + def makelink(self, tarinfo, targetpath): + """Make a (symbolic) link called targetpath. If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ + try: + # For systems that support symbolic and hard links. + if tarinfo.issym(): + os.symlink(tarinfo.linkname, targetpath) + else: + # See extract(). + if os.path.exists(tarinfo._link_target): + os.link(tarinfo._link_target, targetpath) + else: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except symlink_exception: + if tarinfo.issym(): + linkpath = os.path.join(os.path.dirname(tarinfo.name), + tarinfo.linkname) + else: + linkpath = tarinfo.linkname + else: + try: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except KeyError: + raise ExtractError("unable to resolve link inside archive") + + def chown(self, tarinfo, targetpath): + """Set owner of targetpath according to tarinfo. + """ + if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: + # We have to be root to do so. + try: + g = grp.getgrnam(tarinfo.gname)[2] + except KeyError: + g = tarinfo.gid + try: + u = pwd.getpwnam(tarinfo.uname)[2] + except KeyError: + u = tarinfo.uid + try: + if tarinfo.issym() and hasattr(os, "lchown"): + os.lchown(targetpath, u, g) + else: + if sys.platform != "os2emx": + os.chown(targetpath, u, g) + except EnvironmentError as e: + raise ExtractError("could not change owner") + + def chmod(self, tarinfo, targetpath): + """Set file permissions of targetpath according to tarinfo. + """ + if hasattr(os, 'chmod'): + try: + os.chmod(targetpath, tarinfo.mode) + except EnvironmentError as e: + raise ExtractError("could not change mode") + + def utime(self, tarinfo, targetpath): + """Set modification time of targetpath according to tarinfo. 
+ """ + if not hasattr(os, 'utime'): + return + try: + os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + except EnvironmentError as e: + raise ExtractError("could not change modification time") + + #-------------------------------------------------------------------------- + def next(self): + """Return the next member of the archive as a TarInfo object, when + TarFile is opened for reading. Return None if there is no more + available. + """ + self._check("ra") + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Read the next block. + self.fileobj.seek(self.offset) + tarinfo = None + while True: + try: + tarinfo = self.tarinfo.fromtarfile(self) + except EOFHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + except InvalidHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + elif self.offset == 0: + raise ReadError(str(e)) + except EmptyHeaderError: + if self.offset == 0: + raise ReadError("empty file") + except TruncatedHeaderError as e: + if self.offset == 0: + raise ReadError(str(e)) + except SubsequentHeaderError as e: + raise ReadError(str(e)) + break + + if tarinfo is not None: + self.members.append(tarinfo) + else: + self._loaded = True + + return tarinfo + + #-------------------------------------------------------------------------- + # Little helper methods: + + def _getmember(self, name, tarinfo=None, normalize=False): + """Find an archive member by name from bottom to top. + If tarinfo is given, it is used as the starting point. + """ + # Ensure that all members have been loaded. + members = self.getmembers() + + # Limit the member search list up to tarinfo. + if tarinfo is not None: + members = members[:members.index(tarinfo)] + + if normalize: + name = os.path.normpath(name) + + for member in reversed(members): + if normalize: + member_name = os.path.normpath(member.name) + else: + member_name = member.name + + if name == member_name: + return member + + def _load(self): + """Read through the entire archive file and look for readable + members. + """ + while True: + tarinfo = self.next() + if tarinfo is None: + break + self._loaded = True + + def _check(self, mode=None): + """Check if TarFile is still open, and if the operation's mode + corresponds to TarFile's mode. + """ + if self.closed: + raise IOError("%s is closed" % self.__class__.__name__) + if mode is not None and self.mode not in mode: + raise IOError("bad operation for mode %r" % self.mode) + + def _find_link_target(self, tarinfo): + """Find the target member of a symlink or hardlink member in the + archive. + """ + if tarinfo.issym(): + # Always search the entire archive. + linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname + limit = None + else: + # Search the archive before the link, because a hard link is + # just a reference to an already archived file. + linkname = tarinfo.linkname + limit = tarinfo + + member = self._getmember(linkname, tarinfo=limit, normalize=True) + if member is None: + raise KeyError("linkname %r not found" % linkname) + return member + + def __iter__(self): + """Provide an iterator object. + """ + if self._loaded: + return iter(self.members) + else: + return TarIter(self) + + def _dbg(self, level, msg): + """Write debugging output to sys.stderr. 
+ """ + if level <= self.debug: + print(msg, file=sys.stderr) + + def __enter__(self): + self._check() + return self + + def __exit__(self, type, value, traceback): + if type is None: + self.close() + else: + # An exception occurred. We must not call close() because + # it would try to write end-of-archive blocks and padding. + if not self._extfileobj: + self.fileobj.close() + self.closed = True +# class TarFile + +class TarIter(object): + """Iterator Class. + + for tarinfo in TarFile(...): + suite... + """ + + def __init__(self, tarfile): + """Construct a TarIter object. + """ + self.tarfile = tarfile + self.index = 0 + def __iter__(self): + """Return iterator object. + """ + return self + + def __next__(self): + """Return the next item using TarFile's next() method. + When all members have been read, set TarFile as _loaded. + """ + # Fix for SF #1100429: Under rare circumstances it can + # happen that getmembers() is called during iteration, + # which will cause TarIter to stop prematurely. + if not self.tarfile._loaded: + tarinfo = self.tarfile.next() + if not tarinfo: + self.tarfile._loaded = True + raise StopIteration + else: + try: + tarinfo = self.tarfile.members[self.index] + except IndexError: + raise StopIteration + self.index += 1 + return tarinfo + + next = __next__ # for Python 2.x + +#-------------------- +# exported functions +#-------------------- +def is_tarfile(name): + """Return True if name points to a tar archive that we + are able to handle, else return False. + """ + try: + t = open(name) + t.close() + return True + except TarError: + return False + +bltn_open = open +open = TarFile.open diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/compat.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/compat.py new file mode 100644 index 0000000..e594106 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/compat.py @@ -0,0 +1,1122 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import absolute_import + +import os +import re +import sys + +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from ._backport import shutil + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + # Leaving this around for now, in case it needs resurrecting in some way + # _userprog = None + # def splituser(host): + # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + # global _userprog + # if _userprog is None: + # import re + # _userprog = re.compile('^(.*)@(.*)$') + + # match = _userprog.match(host) + # if match: return match.group(1, 2) + # return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + import shutil + from urllib.parse import (urlparse, urlunparse, urljoin, quote, + unquote, urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib.request import HTTPSHandler + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + class CertificateError(ValueError): + pass + + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. 
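+ # For example (illustrative): with dn == "*.example.com" the pattern built
+ # below matches "www.example.com" but neither "example.com" (the wildcard
+ # must match a non-empty label) nor "a.b.example.com" (it never spans a
+ # dot).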
+ if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. 
./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: # pragma: no cover + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + +try: + import sysconfig +except ImportError: # pragma: no cover + from ._backport import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections.abc import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + # Issue #99: on some systems (e.g. containerised), + # sys.getfilesystemencoding() returns None, and we need a real value, + # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and + # sys.getfilesystemencoding(): the return value is "the user’s preference + # according to the result of nl_langinfo(CODESET), or None if the + # nl_langinfo(CODESET) failed." 
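# --- annotation, not part of the vendored file -------------------------------
# A quick sketch of why the fallback below pairs the filesystem encoding with
# 'surrogateescape': undecodable bytes survive a bytes -> str -> bytes round
# trip instead of raising, which is what os.fsencode/os.fsdecode guarantee.
raw = b'caf\xe9'                                   # latin-1 bytes, not valid UTF-8
text = raw.decode('utf-8', 'surrogateescape')      # becomes 'caf\udce9'
assert text.encode('utf-8', 'surrogateescape') == raw
# ------------------------------------------------------------------------------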
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8' + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + import re + + cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. 
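# --- annotation, not part of the vendored file -------------------------------
# Expected behaviour of the detect_encoding() fallback being defined here,
# demonstrated with the stdlib tokenize.detect_encoding, which it mirrors:
# a PEP 263 coding cookie on the first line wins, otherwise UTF-8 (or
# 'utf-8-sig' when a BOM is present) is reported.
import io
from tokenize import detect_encoding as stdlib_detect_encoding

source = b"# -*- coding: iso-8859-1 -*-\nx = 1\n"
encoding, consumed = stdlib_detect_encoding(io.BytesIO(source).readline)
assert encoding == 'iso-8859-1'
assert consumed == [b"# -*- coding: iso-8859-1 -*-\n"]
# ------------------------------------------------------------------------------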
+ line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format(filename, + encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format(filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +# For converting & <-> & etc. +try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + +try: + from importlib.util import cache_from_source # Python >= 3.4 +except ImportError: # pragma: no cover + try: + from imp import cache_from_source + except ImportError: # pragma: no cover + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover +## {{{ http://code.activestate.com/recipes/576693/ (r9) +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. 
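# --- annotation, not part of the vendored file -------------------------------
# A toy version of the sentinel-based circular doubly linked list described in
# these comments. Each link is [PREV, NEXT, KEY]; the sentinel `root` starts
# out pointing at itself, and new keys are spliced in just before it.
PREV, NEXT, KEY = 0, 1, 2
root = []
root[:] = [root, root, None]                  # empty list: sentinel only

links = {}
for key in ('a', 'b', 'c'):
    last = root[PREV]
    links[key] = last[NEXT] = root[PREV] = [last, root, key]

order, curr = [], root[NEXT]
while curr is not root:                       # walk NEXT pointers from the sentinel
    order.append(curr[KEY])
    curr = curr[NEXT]
assert order == ['a', 'b', 'c']               # keys come back in insertion order
# ------------------------------------------------------------------------------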
+ + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
+ + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. + + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. 
+ """ + + CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext' : 'ext_convert', + 'cfg' : 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' + frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + #print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + #rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. 
+ """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/database.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/database.py new file mode 100644 index 0000000..0a90c30 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/database.py @@ -0,0 +1,1339 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) + + +__all__ = ['Distribution', 'BaseInstalledDistribution', + 'InstalledDistribution', 'EggInfoDistribution', + 'DistributionPath'] + + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). 
+ """ + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. + """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. + seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, + WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, + env=self) + elif self._include_egg and entry.endswith(('.egg-info', + '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. + """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. 
+ :type version: string + :returns: directory name + :rtype: string""" + name = name.replace('-', '_') + return '-'.join([name, version]) + DISTINFO_EXT + + def get_distributions(self): + """ + Provides an iterator that looks for distributions and returns + :class:`InstalledDistribution` or + :class:`EggInfoDistribution` instances for each one of them. + + :rtype: iterator of :class:`InstalledDistribution` and + :class:`EggInfoDistribution` instances + """ + if not self._cache_enabled: + for dist in self._yield_distributions(): + yield dist + else: + self._generate_cache() + + for dist in self._cache.path.values(): + yield dist + + if self._include_egg: + for dist in self._cache_egg.path.values(): + yield dist + + def get_distribution(self, name): + """ + Looks for a named distribution on the path. + + This function only returns the first result found, as no more than one + value is expected. If nothing is found, ``None`` is returned. + + :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` + or ``None`` + """ + result = None + name = name.lower() + if not self._cache_enabled: + for dist in self._yield_distributions(): + if dist.key == name: + result = dist + break + else: + self._generate_cache() + + if name in self._cache.name: + result = self._cache.name[name][0] + elif self._include_egg and name in self._cache_egg.name: + result = self._cache_egg.name[name][0] + return result + + def provides_distribution(self, name, version=None): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. + + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + matcher = None + if version is not None: + try: + matcher = self._scheme.matcher('%s (%s)' % (name, version)) + except ValueError: + raise DistlibException('invalid name or version: %r, %r' % + (name, version)) + + for dist in self.get_distributions(): + # We hit a problem on Travis where enum34 was installed and doesn't + # have a provides attribute ... + if not hasattr(dist, 'provides'): + logger.debug('No "provides": %s', dist) + else: + provided = dist.provides + + for p in provided: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: + yield dist + break + else: + if p_name == name and matcher.match(p_ver): + yield dist + break + + def get_file_path(self, name, relative_path): + """ + Return the path to a resource file. + """ + dist = self.get_distribution(name) + if dist is None: + raise LookupError('no distribution named %r found' % name) + return dist.get_resource_path(relative_path) + + def get_exported_entries(self, category, name=None): + """ + Return all of the exported entries in a particular category. + + :param category: The category to search for entries. + :param name: If specified, only entries with that name are returned. + """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. 
+ """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. + """ + self.metadata = metadata + self.name = metadata.name + self.key = self.name.lower() # for case-insensitive comparisons + self.version = metadata.version + self.locator = None + self.digest = None + self.extras = None # additional features requested + self.context = None # environment marker overrides + self.download_urls = set() + self.digests = {} + + @property + def source_url(self): + """ + The source archive download URL for this distribution. + """ + return self.metadata.source_url + + download_url = source_url # Backward compatibility + + @property + def name_and_version(self): + """ + A utility property which displays the name and version in parentheses. + """ + return '%s (%s)' % (self.name, self.version) + + @property + def provides(self): + """ + A set of distribution names and versions provided by this distribution. + :return: A set of "name (version)" strings. + """ + plist = self.metadata.provides + s = '%s (%s)' % (self.name, self.version) + if s not in plist: + plist.append(s) + return plist + + def _get_requirements(self, req_attr): + md = self.metadata + logger.debug('Getting requirements from metadata %r', md.todict()) + reqts = getattr(md, req_attr) + return set(md.get_requirements(reqts, extras=self.extras, + env=self.context)) + + @property + def run_requires(self): + return self._get_requirements('run_requires') + + @property + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') + + @property + def test_requires(self): + return self._get_requirements('test_requires') + + @property + def dev_requires(self): + return self._get_requirements('dev_requires') + + def matches_requirement(self, req): + """ + Say if this instance matches (fulfills) a requirement. + :param req: The requirement to match. + :rtype req: str + :return: True if it matches, else False. + """ + # Requirement may contain extras - parse to lose those + # from what's passed to the matcher + r = parse_requirement(req) + scheme = get_scheme(self.metadata.scheme) + try: + matcher = scheme.matcher(r.requirement) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + result = False + for p in self.provides: + p_name, p_ver = parse_name_and_version(p) + if p_name != name: + continue + try: + result = matcher.match(p_ver) + break + except UnsupportedVersionError: + pass + return result + + def __repr__(self): + """ + Return a textual representation of this instance, + """ + if self.source_url: + suffix = ' [%s]' % self.source_url + else: + suffix = '' + return '' % (self.name, self.version, suffix) + + def __eq__(self, other): + """ + See if this distribution is the same as another. + :param other: The distribution to compare with. To be equal to one + another. 
distributions must have the same type, name, + version and source_url. + :return: True if it is the same, else False. + """ + if type(other) is not type(self): + result = False + else: + result = (self.name == other.name and + self.version == other.version and + self.source_url == other.source_url) + return result + + def __hash__(self): + """ + Compute hash in a way which matches the equality test. + """ + return hash(self.name) + hash(self.version) + hash(self.source_url) + + +class BaseInstalledDistribution(Distribution): + """ + This is the base class for installed distributions (whether PEP 376 or + legacy). + """ + + hasher = None + + def __init__(self, metadata, path, env=None): + """ + Initialise an instance. + :param metadata: An instance of :class:`Metadata` which describes the + distribution. This will normally have been initialised + from a metadata file in the ``path``. + :param path: The path of the ``.dist-info`` or ``.egg-info`` + directory for the distribution. + :param env: This is normally the :class:`DistributionPath` + instance where this distribution was found. + """ + super(BaseInstalledDistribution, self).__init__(metadata) + self.path = path + self.dist_path = env + + def get_hash(self, data, hasher=None): + """ + Get the hash of some data, using a particular hash algorithm, if + specified. + + :param data: The data to be hashed. + :type data: bytes + :param hasher: The name of a hash implementation, supported by hashlib, + or ``None``. Examples of valid values are ``'sha1'``, + ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and + ``'sha512'``. If no hasher is specified, the ``hasher`` + attribute of the :class:`InstalledDistribution` instance + is used. If the hasher is determined to be ``None``, MD5 + is used as the hashing algorithm. + :returns: The hash of the data. If a hasher was explicitly specified, + the returned hash will be prefixed with the specified hasher + followed by '='. + :rtype: str + """ + if hasher is None: + hasher = self.hasher + if hasher is None: + hasher = hashlib.md5 + prefix = '' + else: + hasher = getattr(hashlib, hasher) + prefix = '%s=' % self.hasher + digest = hasher(data).digest() + digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') + return '%s%s' % (prefix, digest) + + +class InstalledDistribution(BaseInstalledDistribution): + """ + Created with the *path* of the ``.dist-info`` directory provided to the + constructor. It reads the metadata contained in ``pydist.json`` when it is + instantiated., or uses a passed in Metadata instance (useful for when + dry-run mode is being used). 
+ """ + + hasher = 'sha256' + + def __init__(self, path, metadata=None, env=None): + self.modules = [] + self.finder = finder = resources.finder_for_path(path) + if finder is None: + raise ValueError('finder unavailable for %s' % path) + if env and env._cache_enabled and path in env._cache.path: + metadata = env._cache.path[path].metadata + elif metadata is None: + r = finder.find(METADATA_FILENAME) + # Temporary - for Wheel 0.23 support + if r is None: + r = finder.find(WHEEL_METADATA_FILENAME) + # Temporary - for legacy support + if r is None: + r = finder.find(LEGACY_METADATA_FILENAME) + if r is None: + raise ValueError('no %s found in %s' % (METADATA_FILENAME, + path)) + with contextlib.closing(r.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + + super(InstalledDistribution, self).__init__(metadata, path, env) + + if env and env._cache_enabled: + env._cache.add(self) + + r = finder.find('REQUESTED') + self.requested = r is not None + p = os.path.join(path, 'top_level.txt') + if os.path.exists(p): + with open(p, 'rb') as f: + data = f.read().decode('utf-8') + self.modules = data.splitlines() + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def _get_records(self): + """ + Get the list of installed files for the distribution + :return: A list of tuples of path, hash and size. Note that hash and + size might be ``None`` for some entries. The path is exactly + as stored in the file (which is as in PEP 376). + """ + results = [] + r = self.get_distinfo_resource('RECORD') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as record_reader: + # Base location is parent dir of .dist-info dir + #base_location = os.path.dirname(self.path) + #base_location = os.path.abspath(base_location) + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + #if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) + results.append((path, checksum, size)) + return results + + @cached_property + def exports(self): + """ + Return the information exported by this distribution. + :return: A dictionary of exports, mapping an export category to a dict + of :class:`ExportEntry` instances describing the individual + export entries, and keyed by name. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + result = self.read_exports() + return result + + def read_exports(self): + """ + Read exports data from a file in .ini format. + + :return: A dictionary of exports, mapping an export category to a list + of :class:`ExportEntry` instances describing the individual + export entries. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + with contextlib.closing(r.as_stream()) as stream: + result = read_exports(stream) + return result + + def write_exports(self, exports): + """ + Write a dictionary of exports to a file in .ini format. + :param exports: A dictionary of exports, mapping an export category to + a list of :class:`ExportEntry` instances describing the + individual export entries. + """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. 
+ + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. + """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix and + path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append((path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. 
via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. + :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. + """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. + + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? + distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. 
+ + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. + """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + if line.startswith('['): + logger.warning('Unexpected line: quitting requirement scan: %r', + line) + break + r = parse_requirement(line) + if not r: + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. 
+ """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + + tl_path = tl_data = None + if path.endswith('.egg'): + if os.path.isdir(path): + p = os.path.join(path, 'EGG-INFO') + meta_path = os.path.join(p, 'PKG-INFO') + metadata = Metadata(path=meta_path, scheme='legacy') + req_path = os.path.join(p, 'requires.txt') + tl_path = os.path.join(p, 'top_level.txt') + requires = parse_requires_path(req_path) + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO( + zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + metadata = Metadata(fileobj=fileobj, scheme='legacy') + try: + data = zipf.get_data('EGG-INFO/requires.txt') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') + requires = parse_requires_data(data.decode('utf-8')) + except IOError: + requires = None + elif path.endswith('.egg-info'): + if os.path.isdir(path): + req_path = os.path.join(path, 'requires.txt') + requires = parse_requires_path(req_path) + path = os.path.join(path, 'PKG-INFO') + tl_path = os.path.join(path, 'top_level.txt') + metadata = Metadata(path=path, scheme='legacy') + else: + raise DistlibException('path must end with .egg-info or .egg, ' + 'got %r' % path) + + if requires: + metadata.add_requirements(requires) + # look for top-level modules in top_level.txt, if present + if tl_data is None: + if tl_path is not None and os.path.exists(tl_path): + with open(tl_path, 'rb') as f: + tl_data = f.read().decode('utf-8') + if not tl_data: + tl_data = [] + else: + tl_data = tl_data.splitlines() + self.modules = tl_data + return metadata + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + for path, _, _ in self.list_installed_files(): + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + return mismatches + + def list_installed_files(self): + """ + Iterates over the ``installed-files.txt`` entries and returns a tuple + ``(path, hash, size)`` for each line. 
+ + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + #otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. + :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. + """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + #self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. 
+ + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. + + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if not label is None: + f.write('"%s" -> "%s" [label="%s"]\n' % + (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. + """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. 
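# Hedged usage sketch: the DependencyGraph methods above only need hashable
# objects exposing 'name' and 'version', so a namedtuple stands in for real
# Distribution instances here.
from collections import namedtuple
from io import StringIO

D = namedtuple('D', 'name version')
a, b = D('a', '1.0'), D('b', '2.0')
graph = DependencyGraph()
graph.add_distribution(a)
graph.add_distribution(b)
graph.add_edge(a, b, 'b (>= 2.0)')   # a depends on b
print(graph.repr_node(a))            # "a 1.0" with "b 2.0 [b (>= 2.0)]" indented
buf = StringIO()
graph.to_dot(buf)                    # DOT text containing the a -> b edge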
+ break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', + ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires | + dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = [] # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop()[0] + req.append(d) + for pred in graph.adjacency_list[d]: + if pred not in req: + todo.append(pred) + + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. 
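# Continuing the hedged sketch above: topological_sort() yields a
# dependency-first ordering plus any nodes stuck in a cycle.
ordered, cyclic = graph.topological_sort()
print([d.name for d in ordered])     # ['b', 'a'] - the dependency comes first
print(cyclic)                        # [] - no circular dependencies here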
+ """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Placeholder for summary' + return Distribution(md) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/index.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/index.py new file mode 100644 index 0000000..b1fbbf8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/index.py @@ -0,0 +1,509 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.org/pypi' +DEFAULT_REALM = 'pypi' + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. + """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + with open(os.devnull, 'w') as sink: + # Use gpg by default rather than gpg2, as gpg2 insists on + # prompting for passwords + for s in ('gpg', 'gpg2'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from .util import _get_pypirc_command as cmd + return cmd() + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils. This populates + ``username``, ``password``, ``realm`` and ``url`` attributes from the + configuration. + """ + from .util import _load_pypirc + cfg = _load_pypirc(self) + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. You must have set ``username`` and + ``password`` attributes before calling this method. + """ + self.check_credentials() + from .util import _store_pypirc + _store_pypirc(self) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. 
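# Hedged sketch of make_dist() from the database module above; the name,
# version and summary are placeholders, and the name/version attributes are
# assumed to be mirrored onto the Distribution from its metadata.
dist = make_dist('example-pkg', '1.0.0', summary='An example distribution')
print(dist.name, dist.version)       # example-pkg 1.0.0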
+ """ + if self.username is None or self.password is None: + raise DistlibException('username and password must be set') + pm = HTTPPasswordMgr() + _, netloc, _, _, _, _ = urlparse(self.url) + pm.add_password(self.realm, netloc, self.username, self.password) + self.password_handler = HTTPBasicAuthHandler(pm) + + def register(self, metadata): + """ + Register a distribution on PyPI, using the provided metadata. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the distribution to be + registered. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + metadata.validate() + d = metadata.todict() + d[':action'] = 'verify' + request = self.encode_request(d.items(), []) + response = self.send_request(request) + d[':action'] = 'submit' + request = self.encode_request(d.items(), []) + return self.send_request(request) + + def _reader(self, name, stream, outbuf): + """ + Thread runner for reading lines of from a subprocess into a buffer. + + :param name: The logical name of the stream (used for logging only). + :param stream: The stream to read from. This will typically a pipe + connected to the output stream of a subprocess. + :param outbuf: The list to append the read lines to. + """ + while True: + s = stream.readline() + if not s: + break + s = s.decode('utf-8').rstrip() + outbuf.append(s) + logger.debug('%s: %s' % (name, s)) + stream.close() + + def get_sign_command(self, filename, signer, sign_password, + keystore=None): + """ + Return a suitable command for signing a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The signing command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + if sign_password is not None: + cmd.extend(['--batch', '--passphrase-fd', '0']) + td = tempfile.mkdtemp() + sf = os.path.join(td, os.path.basename(filename) + '.asc') + cmd.extend(['--detach-sign', '--armor', '--local-user', + signer, '--output', sf, filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd, sf + + def run_command(self, cmd, input_data=None): + """ + Run a command in a child process , passing it any input data specified. + + :param cmd: The command to run. + :param input_data: If specified, this must be a byte string containing + data to be sent to the child process. + :return: A tuple consisting of the subprocess' exit code, a list of + lines read from the subprocess' ``stdout``, and a list of + lines read from the subprocess' ``stderr``. 
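# Hedged sketch of the registration setup above; the URL and credentials are
# placeholders and nothing is sent to any server here.
index = PackageIndex('https://pypi.example.org/pypi')
index.username = 'uploader'
index.password = 'not-a-real-password'
index.check_credentials()            # installs the HTTP basic-auth handler
# index.register(metadata)           # would need a validated Metadata instance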
+ """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
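# A small, hedged demonstration of run_command() above; 'echo' is just a
# placeholder command and assumes a POSIX-style environment.
index = PackageIndex()
rc, out_lines, err_lines = index.run_command(['echo', 'hello'])
print(rc, out_lines)                 # 0 ['hello']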
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protocol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
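# Hedged sketch: get_verify_command() above only builds the gpg argument list;
# the filenames are placeholders and nothing is executed.
index = PackageIndex()
if index.gpg:                        # only if a gpg binary was detected
    cmd = index.get_verify_command('pkg-1.0.tar.gz.asc', 'pkg-1.0.tar.gz')
    print(cmd)                       # e.g. ['gpg', '--status-fd', '2', '--no-tty', '--verify', ...]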
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error ' + 'code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. + with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
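# A minimal sketch (not distlib code) of the digest check that download_file()
# above performs while streaming; 'path', 'algo' and 'expected' are
# hypothetical.
import hashlib

def digest_matches(path, algo, expected):
    hasher = getattr(hashlib, algo)()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(8192), b''):
            hasher.update(block)
    return hasher.hexdigest() == expected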
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): + if isinstance(terms, string_types): + terms = {'name': terms} + rpc_proxy = ServerProxy(self.url, timeout=3.0) + try: + return rpc_proxy.search(terms, operator or 'and') + finally: + rpc_proxy('close')() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/locators.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/locators.py new file mode 100644 index 0000000..0c7d639 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/locators.py @@ -0,0 +1,1300 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, + queue, quote, unescape, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, + Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata, MetadataInvalidError +from .util import (cached_property, ensure_slash, split_filename, get_project_data, + parse_requirement, parse_name_and_version, ServerProxy, + normalize_name) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.org/pypi' + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + try: + return client.list_packages() + finally: + client('close')() + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. 
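# Hedged sketch: encode_request() from index.py above builds the multipart body
# locally, so it can be inspected without sending anything; the field names and
# payload are placeholders.
index = PackageIndex()
req = index.encode_request([(':action', 'verify'), ('name', 'example')],
                           [('content', 'example.txt', b'hello')])
print(req.get_full_url(), len(req.data))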
+ def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. + newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: # pragma: no cover + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, + headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf',) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl',) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None + self.errors = queue.Queue() + + def get_errors(self): + """ + Return any errors which have occurred. + """ + result = [] + while not self.errors.empty(): # pragma: no cover + try: + e = self.errors.get(False) + result.append(e) + except self.errors.Empty: + continue + self.errors.task_done() + return result + + def clear_errors(self): + """ + Clear any errors which may have been logged. + """ + # Just get the errors and throw them away + self.get_errors() + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. 
+ """ + if self._cache is None: # pragma: no cover + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + self.clear_errors() + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + is_downloadable = basename.endswith(self.downloadable_extensions) + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme == 'https', 'pypi.org' in t.netloc, + is_downloadable, is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). + + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. 
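# Hedged sketch of score_url()/prefer_url() above; the URLs are placeholders
# and the base Locator is instantiated directly just to exercise the helpers.
loc = Locator()
u1 = 'http://mirror.example.org/packages/example-1.0.zip'
u2 = 'https://pypi.org/packages/source/e/example/example-1.0.tar.gz'
print(loc.prefer_url(u1, u2))        # the https:// PyPI archive is preferred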
+ """ + def same_project(name1, name2): + return normalize_name(name1) == normalize_name(name2) + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): # pragma: no cover + logger.debug('%s: version hint in fragment: %r', + project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': # pragma: no cover + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if not is_compatible(wheel, self.wheel_tags): + logger.debug('Wheel not compatible: %s', path) + else: + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + 'python-version': ', '.join( + ['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception as e: # pragma: no cover + logger.warning('invalid path for wheel: %s', path) + elif not path.endswith(self.downloadable_extensions): # pragma: no cover + logger.debug('Not downloadable: %s', path) + else: # downloadable extension + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: # pragma: no cover + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + #'packagetype': 'sdist', + } + if pyver: # pragma: no cover + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. + """ + result = None + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. 
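# Hedged sketch of convert_url_to_download_info() above; the URL is a
# placeholder and the exact dictionary contents depend on split_filename() and
# Wheel from distlib's util and wheel modules.
loc = Locator()
info = loc.convert_url_to_download_info(
    'https://pypi.org/packages/source/e/example/example-1.0.tar.gz', 'example')
print(info)                          # expected: name/version/filename/url keys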
+ + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. + """ + result = None + r = parse_requirement(requirement) + if r is None: # pragma: no cover + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + pass # logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + # else: + # logger.debug('skipping pre-release ' + # 'version %s of %s', k, matcher.name) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: # pragma: no cover + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. 
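# Hedged sketch of locate() above, using a DirectoryLocator (defined further
# down in this module) over an empty temporary directory so that no network
# access is needed - it simply finds nothing.
import tempfile

loc = DirectoryLocator(tempfile.mkdtemp())
print(loc.locate('example (>= 1.0)'))   # None - no archives to match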
+    """
+    def __init__(self, url, **kwargs):
+        super(PyPIJSONLocator, self).__init__(**kwargs)
+        self.base_url = ensure_slash(url)
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        raise NotImplementedError('Not available from this locator')
+
+    def _get_project(self, name):
+        result = {'urls': {}, 'digests': {}}
+        url = urljoin(self.base_url, '%s/json' % quote(name))
+        try:
+            resp = self.opener.open(url)
+            data = resp.read().decode()  # for now
+            d = json.loads(data)
+            md = Metadata(scheme=self.scheme)
+            data = d['info']
+            md.name = data['name']
+            md.version = data['version']
+            md.license = data.get('license')
+            md.keywords = data.get('keywords', [])
+            md.summary = data.get('summary')
+            dist = Distribution(md)
+            dist.locator = self
+            urls = d['urls']
+            result[md.version] = dist
+            for info in d['urls']:
+                url = info['url']
+                dist.download_urls.add(url)
+                dist.digests[url] = self._get_digest(info)
+                result['urls'].setdefault(md.version, set()).add(url)
+                result['digests'][url] = self._get_digest(info)
+            # Now get other releases
+            for version, infos in d['releases'].items():
+                if version == md.version:
+                    continue  # already done
+                omd = Metadata(scheme=self.scheme)
+                omd.name = md.name
+                omd.version = version
+                odist = Distribution(omd)
+                odist.locator = self
+                result[version] = odist
+                for info in infos:
+                    url = info['url']
+                    odist.download_urls.add(url)
+                    odist.digests[url] = self._get_digest(info)
+                    result['urls'].setdefault(version, set()).add(url)
+                    result['digests'][url] = self._get_digest(info)
+#            for info in urls:
+#                md.source_url = info['url']
+#                dist.digest = self._get_digest(info)
+#                dist.locator = self
+#            for info in urls:
+#                url = info['url']
+#                result['urls'].setdefault(md.version, set()).add(url)
+#                result['digests'][url] = self._get_digest(info)
+        except Exception as e:
+            self.errors.put(text_type(e))
+            logger.exception('JSON fetch failed: %s', e)
+        return result
+
+
+class Page(object):
+    """
+    This class represents a scraped HTML page.
+    """
+    # The following slightly hairy-looking regex just looks for the contents of
+    # an anchor link, which has an attribute "href" either immediately preceded
+    # or immediately followed by a "rel" attribute. The attribute values can be
+    # declared with double quotes, single quotes or no quotes - which leads to
+    # the length of the expression.
+    _href = re.compile("""
+(rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)?
+href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*))
+(\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))?
+""", re.I | re.S | re.X)
+    _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)
+
+    def __init__(self, data, url):
+        """
+        Initialise an instance with the Unicode page contents and the URL they
+        came from.
+        """
+        self.data = data
+        self.base_url = self.url = url
+        m = self._base.search(self.data)
+        if m:
+            self.base_url = m.group(1)
+
+    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+    @cached_property
+    def links(self):
+        """
+        Return the URLs of all the links on a page together with information
+        about their "rel" attribute, for determining which ones to treat as
+        downloads and which ones to queue for further scraping.
+        """
+        def clean(url):
+            "Tidy up an URL."
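# Hedged sketch of Page and its links property above; the HTML snippet and URLs
# are made up.
html = '<a href="example-1.0.tar.gz" rel="download">source</a>'
page = Page(html, 'https://pypi.example.org/simple/example/')
for url, rel in page.links:
    print(url, rel)   # https://pypi.example.org/simple/example/example-1.0.tar.gz download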
+ scheme, netloc, path, params, query, frag = urlparse(url) + return urlunparse((scheme, netloc, quote(path), + params, query, frag)) + + result = set() + for match in self._href.finditer(self.data): + d = match.groupdict('') + rel = (d['rel1'] or d['rel2'] or d['rel3'] or + d['rel4'] or d['rel5'] or d['rel6']) + url = d['url1'] or d['url2'] or d['url3'] + url = urljoin(self.base_url, url) + url = unescape(url) + url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) + result.add((url, rel)) + # We sort the result, hoping to bring the most recent versions + # to the front + result = sorted(result, key=lambda t: t[0], reverse=True) + return result + + +class SimpleScrapingLocator(Locator): + """ + A locator which scrapes HTML pages to locate downloads for a distribution. + This runs multiple threads to do the I/O; performance is at least as good + as pip's PackageFinder, which works in an analogous fashion. + """ + + # These are used to deal with various Content-Encoding schemes. + decoders = { + 'deflate': zlib.decompress, + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), + 'none': lambda b: b, + } + + def __init__(self, url, timeout=None, num_workers=10, **kwargs): + """ + Initialise an instance. + :param url: The root URL to use for scraping. + :param timeout: The timeout, in seconds, to be applied to requests. + This defaults to ``None`` (no timeout specified). + :param num_workers: The number of worker threads you want to do I/O, + This defaults to 10. + :param kwargs: Passed to the superclass. + """ + super(SimpleScrapingLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + self.timeout = timeout + self._page_cache = {} + self._seen = set() + self._to_fetch = queue.Queue() + self._bad_hosts = set() + self.skip_externals = False + self.num_workers = num_workers + self._lock = threading.RLock() + # See issue #45: we need to be resilient when the locator is used + # in a thread, e.g. with concurrent.futures. We can't use self._lock + # as it is for coordinating our internal threads - the ones created + # in _prepare_threads. + self._gplock = threading.RLock() + self.platform_check = False # See issue #112 + + def _prepare_threads(self): + """ + Threads are created only when get_project is called, and terminate + before it returns. They are there primarily to parallelise I/O (i.e. + fetching web pages). + """ + self._threads = [] + for i in range(self.num_workers): + t = threading.Thread(target=self._fetch) + t.setDaemon(True) + t.start() + self._threads.append(t) + + def _wait_threads(self): + """ + Tell all the threads to terminate (by sending a sentinel value) and + wait for them to do so. + """ + # Note that you need two loops, since you can't say which + # thread will get each sentinel + for t in self._threads: + self._to_fetch.put(None) # sentinel + for t in self._threads: + t.join() + self._threads = [] + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + with self._gplock: + self.result = result + self.project_name = name + url = urljoin(self.base_url, '%s/' % quote(name)) + self._seen.clear() + self._page_cache.clear() + self._prepare_threads() + try: + logger.debug('Queueing %s', url) + self._to_fetch.put(url) + self._to_fetch.join() + finally: + self._wait_threads() + del self.result + return result + + platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|' + r'win(32|_amd64)|macosx_?\d+)\b', re.I) + + def _is_platform_dependent(self, url): + """ + Does an URL refer to a platform-specific download? 
+ """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self.platform_check and self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. + """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, + referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + try: + self._seen.add(link) + if (not self._process_download(link) and + self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + except MetadataInvalidError: # e.g. invalid versions + pass + except Exception as e: # pragma: no cover + self.errors.put(text_type(e)) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + #logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
+        """
+        # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
+        scheme, netloc, path, _, _, _ = urlparse(url)
+        if scheme == 'file' and os.path.isdir(url2pathname(path)):
+            url = urljoin(ensure_slash(url), 'index.html')
+
+        if url in self._page_cache:
+            result = self._page_cache[url]
+            logger.debug('Returning %s from cache: %s', url, result)
+        else:
+            host = netloc.split(':', 1)[0]
+            result = None
+            if host in self._bad_hosts:
+                logger.debug('Skipping %s due to bad host %s', url, host)
+            else:
+                req = Request(url, headers={'Accept-encoding': 'identity'})
+                try:
+                    logger.debug('Fetching %s', url)
+                    resp = self.opener.open(req, timeout=self.timeout)
+                    logger.debug('Fetched %s', url)
+                    headers = resp.info()
+                    content_type = headers.get('Content-Type', '')
+                    if HTML_CONTENT_TYPE.match(content_type):
+                        final_url = resp.geturl()
+                        data = resp.read()
+                        encoding = headers.get('Content-Encoding')
+                        if encoding:
+                            decoder = self.decoders[encoding]   # fail if not found
+                            data = decoder(data)
+                        encoding = 'utf-8'
+                        m = CHARSET.search(content_type)
+                        if m:
+                            encoding = m.group(1)
+                        try:
+                            data = data.decode(encoding)
+                        except UnicodeError:  # pragma: no cover
+                            data = data.decode('latin-1')    # fallback
+                        result = Page(data, final_url)
+                        self._page_cache[final_url] = result
+                except HTTPError as e:
+                    if e.code != 404:
+                        logger.exception('Fetch failed: %s: %s', url, e)
+                except URLError as e:  # pragma: no cover
+                    logger.exception('Fetch failed: %s: %s', url, e)
+                    with self._lock:
+                        self._bad_hosts.add(host)
+                except Exception as e:  # pragma: no cover
+                    logger.exception('Fetch failed: %s: %s', url, e)
+                finally:
+                    self._page_cache[url] = result   # even if None (failure)
+        return result
+
+    _distname_re = re.compile('<a href=[^>]*>([^<]+)<')
+
+    def get_distribution_names(self):
+        """
+        Return all the distribution names known to this locator.
+        """
+        result = set()
+        page = self.get_page(self.base_url)
+        if not page:
+            raise DistlibException('Unable to get %s' % self.base_url)
+        for match in self._distname_re.finditer(page.data):
+            result.add(match.group(1))
+        return result
+
+class DirectoryLocator(Locator):
+    """
+    This class locates distributions in a directory tree.
+    """
+
+    def __init__(self, path, **kwargs):
+        """
+        Initialise an instance.
+        :param path: The root of the directory tree to search.
+        :param kwargs: Passed to the superclass constructor,
+                       except for:
+                       * recursive - if True (the default), subdirectories are
+                         recursed into. If False, only the top-level directory
+                         is searched,
+        """
+        self.recursive = kwargs.pop('recursive', True)
+        super(DirectoryLocator, self).__init__(**kwargs)
+        path = os.path.abspath(path)
+        if not os.path.isdir(path):  # pragma: no cover
+            raise DistlibException('Not a directory: %r' % path)
+        self.base_dir = path
+
+    def should_include(self, filename, parent):
+        """
+        Should a filename be considered as a candidate for a distribution
+        archive? As well as the filename, the directory which contains it
+        is provided, though not used by the current implementation.
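# Hedged sketch: should_include() is the hook for narrowing what a
# DirectoryLocator will consider; this illustrative subclass accepts wheels
# only.
class WheelOnlyLocator(DirectoryLocator):
    def should_include(self, filename, parent):
        return filename.endswith('.whl')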
+ """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], info['version'], + summary=data.get('summary', + 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. + + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': {dist.version: set([dist.source_url])}, + 'digests': {dist.version: set([None])} + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. If True, + the results from all locators are merged (this can be + slow). 
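# Hedged sketch of DistPathLocator above: wrapping the distributions installed
# on sys.path so they can take part in resolution (DistributionPath is imported
# from .database at the top of this module); 'pip' is just an example name.
installed = DistPathLocator(DistributionPath())
info = installed.get_project('pip')
print(sorted(k for k in info if k not in ('urls', 'digests')))   # e.g. ['21.1.1']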
+ """ + self.merge = kwargs.pop('merge', False) + self.locators = locators + super(AggregatingLocator, self).__init__(**kwargs) + + def clear_cache(self): + super(AggregatingLocator, self).clear_cache() + for locator in self.locators: + locator.clear_cache() + + def _set_scheme(self, value): + self._scheme = value + for locator in self.locators: + locator.scheme = value + + scheme = property(Locator.scheme.fget, _set_scheme) + + def _get_project(self, name): + result = {} + for locator in self.locators: + d = locator.get_project(name) + if d: + if self.merge: + files = result.get('urls', {}) + digests = result.get('digests', {}) + # next line could overwrite result['urls'], result['digests'] + result.update(d) + df = result.get('urls') + if files and df: + for k, v in files.items(): + if k in df: + df[k] |= v + else: + df[k] = v + dd = result.get('digests') + if digests and dd: + dd.update(digests) + else: + # See issue #18. If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for locator in self.locators: + try: + result |= locator.get_distribution_names() + except NotImplementedError: + pass + return result + + +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 426 / PEP 440. +default_locator = AggregatingLocator( + JSONLocator(), + SimpleScrapingLocator('https://pypi.org/simple/', + timeout=3.0), + scheme='legacy') + +locate = default_locator.locate + + +class DependencyFinder(object): + """ + Locate dependencies for distributions. + """ + + def __init__(self, locator=None): + """ + Initialise an instance, using the specified locator + to locate distributions. + """ + self.locator = locator or default_locator + self.scheme = get_scheme(self.locator.scheme) + + def add_distribution(self, dist): + """ + Add a distribution to the finder. This will update internal information + about who provides what. + :param dist: The distribution to add. + """ + logger.debug('adding distribution %s', dist) + name = dist.key + self.dists_by_name[name] = dist + self.dists[(name, dist.version)] = dist + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + self.provided.setdefault(name, set()).add((version, dist)) + + def remove_distribution(self, dist): + """ + Remove a distribution from the finder. This will update internal + information about who provides what. + :param dist: The distribution to remove. 
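# Hedged sketch of composing locators the way default_locator does above; the
# empty temporary directory stands in for a local wheelhouse, and the locate()
# call is left commented out because it would hit the network.
import tempfile

combined = AggregatingLocator(
    DirectoryLocator(tempfile.mkdtemp()),
    JSONLocator(),
    scheme='legacy')
# dist = combined.locate('requests (>= 2.0)')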
+ """ + logger.debug('removing distribution %s', dist) + name = dist.key + del self.dists_by_name[name] + del self.dists[(name, dist.version)] + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Remove from provided: %s, %s, %s', name, version, dist) + s = self.provided[name] + s.remove((version, dist)) + if not s: + del self.provided[name] + + def get_matcher(self, reqt): + """ + Get a version matcher for a requirement. + :param reqt: The requirement + :type reqt: str + :return: A version matcher (an instance of + :class:`distlib.version.Matcher`). + """ + try: + matcher = self.scheme.matcher(reqt) + except UnsupportedVersionError: # pragma: no cover + # XXX compat-mode if cannot read the version + name = reqt.split()[0] + matcher = self.scheme.matcher(name) + return matcher + + def find_providers(self, reqt): + """ + Find the distributions which can fulfill a requirement. + + :param reqt: The requirement. + :type reqt: str + :return: A set of distribution which can fulfill the requirement. + """ + matcher = self.get_matcher(reqt) + name = matcher.key # case-insensitive + result = set() + provided = self.provided + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + result.add(provider) + break + return result + + def try_to_replace(self, provider, other, problems): + """ + Attempt to replace one provider with another. This is typically used + when resolving dependencies from multiple sources, e.g. A requires + (B >= 1.0) while C requires (B >= 1.1). + + For successful replacement, ``provider`` must meet all the requirements + which ``other`` fulfills. + + :param provider: The provider we are trying to replace with. + :param other: The provider we're trying to replace. + :param problems: If False is returned, this will contain what + problems prevented replacement. This is currently + a tuple of the literal string 'cantreplace', + ``provider``, ``other`` and the set of requirements + that ``provider`` couldn't fulfill. + :return: True if we can replace ``other`` with ``provider``, else + False. + """ + rlist = self.reqts[other] + unmatched = set() + for s in rlist: + matcher = self.get_matcher(s) + if not matcher.match(provider.version): + unmatched.add(s) + if unmatched: + # can't replace other with provider + problems.add(('cantreplace', provider, other, + frozenset(unmatched))) + result = False + else: + # can replace other with provider + self.remove_distribution(other) + del self.reqts[other] + for s in rlist: + self.reqts.setdefault(provider, set()).add(s) + self.add_distribution(provider) + result = True + return result + + def find(self, requirement, meta_extras=None, prereleases=False): + """ + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. + :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. + + Return a set of :class:`Distribution` instances and a set of + problems. 
+ + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, + prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + #import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if meta_extras and dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. + if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', + provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', + dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/manifest.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/manifest.py new file mode 100644 index 0000000..ca0fe44 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/manifest.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2013 Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
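A rough sketch of driving `DependencyFinder.find()` end to end, assuming a standalone `distlib` installation and network access for the default locator; the pinned requirement is an arbitrary example:

```python
# Illustrative sketch only; assumes a standalone distlib installation.
from distlib.locators import DependencyFinder

finder = DependencyFinder()               # uses default_locator unless one is passed
dists, problems = finder.find('flask (== 2.0.1)')

for d in sorted(dists, key=lambda d: d.key):
    kind = 'build-time' if d.build_time_dependency else 'run-time'
    print('%-30s %s' % (d.name_and_version, kind))

for problem in problems:
    print('problem:', problem)            # e.g. ('unsatisfied', 'some-req (>= 1.0)')
```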
+# +""" +Class representing the list of files in a distribution. + +Equivalent to distutils.filelist, but fixes some problems. +""" +import fnmatch +import logging +import os +import re +import sys + +from . import DistlibException +from .compat import fsdecode +from .util import convert_path + + +__all__ = ['Manifest'] + +logger = logging.getLogger(__name__) + +# a \ followed by some spaces + EOL +_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M) +_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) + +# +# Due to the different results returned by fnmatch.translate, we need +# to do slightly different processing for Python 2.7 and 3.2 ... this needed +# to be brought in for Python 3.6 onwards. +# +_PYTHON_VERSION = sys.version_info[:2] + +class Manifest(object): + """A list of files built by on exploring the filesystem and filtered by + applying various patterns to what we find there. + """ + + def __init__(self, base=None): + """ + Initialise an instance. + + :param base: The base directory to explore under. + """ + self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) + self.prefix = self.base + os.sep + self.allfiles = None + self.files = set() + + # + # Public API + # + + def findall(self): + """Find all files under the base and set ``allfiles`` to the absolute + pathnames of files found. + """ + from stat import S_ISREG, S_ISDIR, S_ISLNK + + self.allfiles = allfiles = [] + root = self.base + stack = [root] + pop = stack.pop + push = stack.append + + while stack: + root = pop() + names = os.listdir(root) + + for name in names: + fullname = os.path.join(root, name) + + # Avoid excess stat calls -- just one will do, thank you! + stat = os.stat(fullname) + mode = stat.st_mode + if S_ISREG(mode): + allfiles.append(fsdecode(fullname)) + elif S_ISDIR(mode) and not S_ISLNK(mode): + push(fullname) + + def add(self, item): + """ + Add a file to the manifest. + + :param item: The pathname to add. This can be relative to the base. + """ + if not item.startswith(self.prefix): + item = os.path.join(self.base, item) + self.files.add(os.path.normpath(item)) + + def add_many(self, items): + """ + Add a list of files to the manifest. + + :param items: The pathnames to add. These can be relative to the base. + """ + for item in items: + self.add(item) + + def sorted(self, wantdirs=False): + """ + Return sorted files in directory order + """ + + def add_dir(dirs, d): + dirs.add(d) + logger.debug('add_dir added %s', d) + if d != self.base: + parent, _ = os.path.split(d) + assert parent not in ('', '/') + add_dir(dirs, parent) + + result = set(self.files) # make a copy! + if wantdirs: + dirs = set() + for f in result: + add_dir(dirs, os.path.dirname(f)) + result |= dirs + return [os.path.join(*path_tuple) for path_tuple in + sorted(os.path.split(path) for path in result)] + + def clear(self): + """Clear all collected files.""" + self.files = set() + self.allfiles = [] + + def process_directive(self, directive): + """ + Process a directive which either adds some files from ``allfiles`` to + ``files``, or removes some files from ``files``. + + :param directive: The directive to process. This should be in a format + compatible with distutils ``MANIFEST.in`` files: + + http://docs.python.org/distutils/sourcedist.html#commands + """ + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. 
Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dirpattern). + action, patterns, thedir, dirpattern = self._parse_directive(directive) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. + if action == 'include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=True): + logger.warning('no files found matching %r', pattern) + + elif action == 'exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, anchor=True) + #if not found: + # logger.warning('no previously-included files ' + # 'found matching %r', pattern) + + elif action == 'global-include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=False): + logger.warning('no files found matching %r ' + 'anywhere in distribution', pattern) + + elif action == 'global-exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, anchor=False) + #if not found: + # logger.warning('no previously-included files ' + # 'matching %r found anywhere in ' + # 'distribution', pattern) + + elif action == 'recursive-include': + for pattern in patterns: + if not self._include_pattern(pattern, prefix=thedir): + logger.warning('no files found matching %r ' + 'under directory %r', pattern, thedir) + + elif action == 'recursive-exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, prefix=thedir) + #if not found: + # logger.warning('no previously-included files ' + # 'matching %r found under directory %r', + # pattern, thedir) + + elif action == 'graft': + if not self._include_pattern(None, prefix=dirpattern): + logger.warning('no directories found matching %r', + dirpattern) + + elif action == 'prune': + if not self._exclude_pattern(None, prefix=dirpattern): + logger.warning('no previously-included directories found ' + 'matching %r', dirpattern) + else: # pragma: no cover + # This should never happen, as it should be caught in + # _parse_template_line + raise DistlibException( + 'invalid action %r' % action) + + # + # Private API + # + + def _parse_directive(self, directive): + """ + Validate a directive. + :param directive: The directive to validate. + :return: A tuple of action, patterns, thedir, dir_patterns + """ + words = directive.split() + if len(words) == 1 and words[0] not in ('include', 'exclude', + 'global-include', + 'global-exclude', + 'recursive-include', + 'recursive-exclude', + 'graft', 'prune'): + # no action given, let's use the default 'include' + words.insert(0, 'include') + + action = words[0] + patterns = thedir = dir_pattern = None + + if action in ('include', 'exclude', + 'global-include', 'global-exclude'): + if len(words) < 2: + raise DistlibException( + '%r expects ...' % action) + + patterns = [convert_path(word) for word in words[1:]] + + elif action in ('recursive-include', 'recursive-exclude'): + if len(words) < 3: + raise DistlibException( + '%r expects ...' 
% action) + + thedir = convert_path(words[1]) + patterns = [convert_path(word) for word in words[2:]] + + elif action in ('graft', 'prune'): + if len(words) != 2: + raise DistlibException( + '%r expects a single ' % action) + + dir_pattern = convert_path(words[1]) + + else: + raise DistlibException('unknown action %r' % action) + + return action, patterns, thedir, dir_pattern + + def _include_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Select strings (presumably filenames) from 'self.files' that + match 'pattern', a Unix-style wildcard (glob) pattern. + + Patterns are not quite the same as implemented by the 'fnmatch' + module: '*' and '?' match non-special characters, where "special" + is platform-dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found. + """ + # XXX docstring lying about what the special chars are? + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.files.add(name) + found = True + return found + + def _exclude_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. + + Other parameters are the same as for 'include_pattern()', above. + The list 'self.files' is modified in place. Return True if files are + found. + + This API is public to allow e.g. exclusion of SCM subdirs, e.g. when + packaging source distributions + """ + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + for f in list(self.files): + if pattern_re.search(f): + self.files.remove(f) + found = True + return found + + def _translate_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Translate a shell-like wildcard pattern to a compiled regular + expression. + + Return the compiled regex. If 'is_regex' true, + then 'pattern' is directly compiled to a regex (if it's a string) + or just returned as-is (assumes it's a regex object). 
+ """ + if is_regex: + if isinstance(pattern, str): + return re.compile(pattern) + else: + return pattern + + if _PYTHON_VERSION > (3, 2): + # ditch start and end characters + start, _, end = self._glob_to_re('_').partition('_') + + if pattern: + pattern_re = self._glob_to_re(pattern) + if _PYTHON_VERSION > (3, 2): + assert pattern_re.startswith(start) and pattern_re.endswith(end) + else: + pattern_re = '' + + base = re.escape(os.path.join(self.base, '')) + if prefix is not None: + # ditch end of pattern character + if _PYTHON_VERSION <= (3, 2): + empty_pattern = self._glob_to_re('') + prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] + else: + prefix_re = self._glob_to_re(prefix) + assert prefix_re.startswith(start) and prefix_re.endswith(end) + prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] + sep = os.sep + if os.sep == '\\': + sep = r'\\' + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + sep.join((prefix_re, + '.*' + pattern_re)) + else: + pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] + pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, + pattern_re, end) + else: # no prefix -- respect anchor flag + if anchor: + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + pattern_re + else: + pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) + + return re.compile(pattern_re) + + def _glob_to_re(self, pattern): + """Translate a shell-like glob pattern to a regular expression. + + Return a string containing the regex. Differs from + 'fnmatch.translate()' in that '*' does not match "special characters" + (which are platform-specific). + """ + pattern_re = fnmatch.translate(pattern) + + # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which + # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, + # and by extension they shouldn't match such "special characters" under + # any OS. So change all non-escaped dots in the RE to match any + # character except the special characters (currently: just os.sep). + sep = os.sep + if os.sep == '\\': + # we're using a regex to manipulate a regex, so we need + # to escape the backslash twice + sep = r'\\\\' + escaped = r'\1[^%s]' % sep + pattern_re = re.sub(r'((? y, + '!=': lambda x, y: x != y, + '<': lambda x, y: x < y, + '<=': lambda x, y: x == y or x < y, + '>': lambda x, y: x > y, + '>=': lambda x, y: x == y or x > y, + 'and': lambda x, y: x and y, + 'or': lambda x, y: x or y, + 'in': lambda x, y: x in y, + 'not in': lambda x, y: x not in y, + } + + def evaluate(self, expr, context): + """ + Evaluate a marker expression returned by the :func:`parse_requirement` + function in the specified context. 
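The operations table above is what the evaluator applies once a marker expression has been parsed, and the module-level `interpret()` helper defined further below ties parsing and evaluation together. A minimal sketch, assuming a standalone `distlib` installation; the marker strings are just examples:

```python
# Illustrative sketch only; assumes a standalone distlib installation.
from distlib.markers import interpret

# Evaluate environment markers against the running interpreter.
print(interpret('python_version >= "3.6"'))
print(interpret('sys_platform == "win32" and python_version < "3.0"'))

# An explicit execution context overrides values taken from DEFAULT_CONTEXT.
print(interpret('platform_system == "Linux"',
                execution_context={'platform_system': 'Linux'}))
```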
+ """ + if isinstance(expr, string_types): + if expr[0] in '\'"': + result = expr[1:-1] + else: + if expr not in context: + raise SyntaxError('unknown variable: %s' % expr) + result = context[expr] + else: + assert isinstance(expr, dict) + op = expr['op'] + if op not in self.operations: + raise NotImplementedError('op not implemented: %s' % op) + elhs = expr['lhs'] + erhs = expr['rhs'] + if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): + raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + + lhs = self.evaluate(elhs, context) + rhs = self.evaluate(erhs, context) + if ((elhs == 'python_version' or erhs == 'python_version') and + op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): + lhs = NV(lhs) + rhs = NV(rhs) + elif elhs == 'python_version' and op in ('in', 'not in'): + lhs = NV(lhs) + rhs = _get_versions(rhs) + result = self.operations[op](lhs, rhs) + return result + +def default_context(): + def format_full_version(info): + version = '%s.%s.%s' % (info.major, info.minor, info.micro) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + if hasattr(sys, 'implementation'): + implementation_version = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + implementation_version = '0' + implementation_name = '' + + result = { + 'implementation_name': implementation_name, + 'implementation_version': implementation_version, + 'os_name': os.name, + 'platform_machine': platform.machine(), + 'platform_python_implementation': platform.python_implementation(), + 'platform_release': platform.release(), + 'platform_system': platform.system(), + 'platform_version': platform.version(), + 'platform_in_venv': str(in_venv()), + 'python_full_version': platform.python_version(), + 'python_version': platform.python_version()[:3], + 'sys_platform': sys.platform, + } + return result + +DEFAULT_CONTEXT = default_context() +del default_context + +evaluator = Evaluator() + +def interpret(marker, execution_context=None): + """ + Interpret a marker and return a result depending on environment. + + :param marker: The marker to interpret. + :type marker: str + :param execution_context: The context used for name lookup. + :type execution_context: mapping + """ + try: + expr, rest = parse_marker(marker) + except Exception as e: + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + if rest and rest[0] != '#': + raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + context = dict(DEFAULT_CONTEXT) + if execution_context: + context.update(execution_context) + return evaluator.evaluate(expr, context) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/metadata.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/metadata.py new file mode 100644 index 0000000..6a26b0a --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/metadata.py @@ -0,0 +1,1058 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . 
import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \\|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). +# Ditto for Obsoletes - see issue #140. 
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides', 'Obsoletes') + +_566_MARKERS = ('Description-Content-Type',) + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) +_ALL_FIELDS.update(_566_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version in ('1.3', '2.1'): + # avoid adding field names if already there + return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS) + elif version == '2.0': + return _426_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + for marker in markers: + if marker in keys: + return True + return False + + keys = [] + for key, value in fields.items(): + if value in ([], 'UNKNOWN', None): + continue + keys.append(key) + + possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + logger.debug('Removed 1.0 due to %s', key) + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + logger.debug('Removed 1.1 due to %s', key) + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + logger.debug('Removed 1.2 due to %s', key) + if key not in _566_FIELDS and '1.3' in possible_versions: + possible_versions.remove('1.3') + logger.debug('Removed 1.3 due to %s', key) + if key not in _566_FIELDS and '2.1' in possible_versions: + if key != 'Description': # In 2.1, description allowed after headers + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) + if key not in _426_FIELDS and '2.0' in possible_versions: + possible_versions.remove('2.0') + logger.debug('Removed 2.0 due to %s', key) + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! 
+ elif len(possible_versions) == 0: + logger.debug('Out of options - unknown metadata set: %s', fields) + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) + is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') + + # we have the choice, 1.0, or 1.2, or 2.0 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.0 adds more features and is very new + if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + if is_2_1: + return '2.1' + + return '2.0' + +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 +_ATTR2FIELD = { + name.lower().replace("-", "_"): name for name in _ALL_FIELDS +} +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
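To make the version-detection rules above concrete, here is a small sketch that calls the private `_best_version()` helper directly (for illustration only), assuming a standalone `distlib` installation; the field values are placeholders:

```python
# Illustrative sketch only; assumes a standalone distlib installation.
# _best_version() is a private helper, used here purely to show the logic.
from distlib.metadata import _best_version

# Only fields known to the 1.0/1.1 sets: the preferred legacy version is chosen.
print(_best_version({'Name': 'demo', 'Version': '0.1',
                     'Classifier': ['Programming Language :: Python']}))   # '1.1'

# Requires-Dist only exists from metadata 1.2 onwards, so 1.2 is selected.
print(_best_version({'Name': 'demo', 'Version': '0.1',
                     'Requires-Dist': ['requests (>=2.0)']}))              # '1.2'
```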
You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. 
+ + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
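A brief sketch of the `LegacyMetadata` mapping-style API (construction from a mapping, item access, `check()` and `write_file()`), assuming a standalone `distlib` installation; the project details are placeholders:

```python
# Illustrative sketch only; assumes a standalone distlib installation.
import sys
from distlib.metadata import LegacyMetadata

md = LegacyMetadata(mapping={'name': 'demo',
                             'version': '0.1.0',
                             'summary': 'An example project'})
md['Home-page'] = 'https://example.com/demo'   # field names and attribute-style
md['author'] = 'A. N. Other'                   # keys are both accepted

missing, warnings = md.check()                 # non-strict: only reports problems
print(md.get_fullname())                       # 'demo-0.1.0'
md.write_file(sys.stdout)                      # emits PKG-INFO style text
```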
+ """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + "'%s': '%s' is not valid (field '%s')", + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. + """ + self.set_metadata_version() + + fields = _version2fieldlist(self['Metadata-Version']) + + data = {} + + for field_name in fields: + if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.0 (JSON) + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. 
+ """ + + METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = 
self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + ('extensions', 'python.details', 'license'): 'License', + 'summary': 'Summary', + 'description': 'Description', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + # import pdb; pdb.set_trace() + for nk, ok in self.LEGACY_MAPPING.items(): + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: any other fields wanted + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/resources.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/resources.py new file mode 100644 index 0000000..fef52aa --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/resources.py @@ -0,0 +1,358 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. 
+# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import sys +import types +import zipimport + +from . import DistlibException +from .util import cached_property, get_cache_base, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. + """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. 
+ """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if resource.is_container: + rname = resource.name + for name in resource.resources: + if not rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. 
+ """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def _is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + # In Python 3.6, _frozen_importlib -> _frozen_importlib_external + try: + import _frozen_importlib_external as _fi + except ImportError: + import _frozen_importlib as _fi + _finder_registry[_fi.SourceFileLoader] = ResourceFinder + _finder_registry[_fi.FileFinder] = ResourceFinder + # See issue #146 + _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder + del _fi +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. 
+ """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. + :return: A :class:`ResourceFinder` instance for the path. + """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/scripts.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/scripts.py new file mode 100644 index 0000000..913912c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/scripts.py @@ -0,0 +1,429 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, + get_executable, get_platform, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +''' + + +def enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. 
+ """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, + dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. + self.set_mode = (os.name == 'posix') or (os.name == 'java' and + os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or ( + os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _build_shebang(self, executable, post_interp): + """ + Build a shebang line. In the simple case (on Windows, or a shebang line + which is not too long or contains spaces) use a simple formulation for + the shebang. Otherwise, use /bin/sh as the executable, with a contrived + shebang which allows the script to run either under Python or sh, using + suitable quoting. Thanks to Harald Nordgren for his input. + + See also: http://www.in-ulm.de/~mascheck/various/shebang/#length + https://hg.mozilla.org/mozilla-central/file/tip/mach + """ + if os.name != 'posix': + simple_shebang = True + else: + # Add 3 for '#!' prefix and newline suffix. + shebang_length = len(executable) + len(post_interp) + 3 + if sys.platform == 'darwin': + max_shebang_length = 512 + else: + max_shebang_length = 127 + simple_shebang = ((b' ' not in executable) and + (shebang_length <= max_shebang_length)) + + if simple_shebang: + result = b'#!' 
+ executable + post_interp + b'\n' + else: + result = b'#!/bin/sh\n' + result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' + result += b"' '''" + return result + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) + if not os.path.isfile(executable): + # for Python builds from source on Windows, no Python executables with + # a version suffix are created, so we use python.exe + executable = os.path.join(sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. + executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp + and '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = self._build_shebang(executable, post_interp) + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. 
+ if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict(module=entry.prefix, + import_name=entry.suffix.split('.')[0], + func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + linesep = os.linesep.encode('utf-8') + if not shebang.endswith(linesep): + shebang += linesep + if not use_launcher: + script_bytes = shebang + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + variant_separator = '-' + + def get_script_filenames(self, name): + result = set() + if '' in self.variants: + result.add(name) + if 'X' in self.variants: + result.add('%s%s' % (name, self.version_info[0])) + if 'X.Y' in self.variants: + result.add('%s%s%s.%s' % (name, self.variant_separator, + self.version_info[0], self.version_info[1])) + return result + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + scriptnames = self.get_script_filenames(entry.name) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s is an empty file (skipping)', script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, + self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. 
+ # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + platform_suffix = '-arm' if get_platform() == 'win-arm64' else '' + name = '%s%s%s.exe' % (kind, bits, platform_suffix) + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + distlib_package = __name__.rsplit('.', 1)[0] + resource = finder(distlib_package).find(name) + if not resource: + msg = ('Unable to find resource %s in package %s' % (name, + distlib_package)) + raise ValueError(msg) + return resource.bytes + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. + + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/util.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/util.py new file mode 100644 index 0000000..80bfc86 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/util.py @@ -0,0 +1,1969 @@ +# +# Copyright (C) 2012-2021 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import codecs +from collections import deque +import contextlib +import csv +from glob import iglob as std_iglob +import io +import json +import logging +import os +import py_compile +import re +import socket +try: + import ssl +except ImportError: # pragma: no cover + ssl = None +import subprocess +import sys +import tarfile +import tempfile +import textwrap + +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import time + +from . import DistlibException +from .compat import (string_types, text_type, shutil, raw_input, StringIO, + cache_from_source, urlopen, urljoin, httplib, xmlrpclib, + splittype, HTTPHandler, BaseConfigurator, valid_ident, + Container, configparser, URLError, ZipFile, fsdecode, + unquote, urlparse) + +logger = logging.getLogger(__name__) + +# +# Requirement parsing code as per PEP 508 +# + +IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') +VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') +COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') +MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') +OR = re.compile(r'^or\b\s*') +AND = re.compile(r'^and\b\s*') +NON_SPACE = re.compile(r'(\S+)\s*') +STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') + + +def parse_marker(marker_string): + """ + Parse a marker string and return a dictionary containing a marker expression. 
+ + The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in + the expression grammar, or strings. A string contained in quotes is to be + interpreted as a literal string, and a string not contained in quotes is a + variable (such as os_name). + """ + def marker_var(remaining): + # either identifier, or literal string + m = IDENTIFIER.match(remaining) + if m: + result = m.groups()[0] + remaining = remaining[m.end():] + elif not remaining: + raise SyntaxError('unexpected end of input') + else: + q = remaining[0] + if q not in '\'"': + raise SyntaxError('invalid expression: %s' % remaining) + oq = '\'"'.replace(q, '') + remaining = remaining[1:] + parts = [q] + while remaining: + # either a string chunk, or oq, or q to terminate + if remaining[0] == q: + break + elif remaining[0] == oq: + parts.append(oq) + remaining = remaining[1:] + else: + m = STRING_CHUNK.match(remaining) + if not m: + raise SyntaxError('error in string literal: %s' % remaining) + parts.append(m.groups()[0]) + remaining = remaining[m.end():] + else: + s = ''.join(parts) + raise SyntaxError('unterminated string: %s' % s) + parts.append(q) + result = ''.join(parts) + remaining = remaining[1:].lstrip() # skip past closing quote + return result, remaining + + def marker_expr(remaining): + if remaining and remaining[0] == '(': + result, remaining = marker(remaining[1:].lstrip()) + if remaining[0] != ')': + raise SyntaxError('unterminated parenthesis: %s' % remaining) + remaining = remaining[1:].lstrip() + else: + lhs, remaining = marker_var(remaining) + while remaining: + m = MARKER_OP.match(remaining) + if not m: + break + op = m.groups()[0] + remaining = remaining[m.end():] + rhs, remaining = marker_var(remaining) + lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + result = lhs + return result, remaining + + def marker_and(remaining): + lhs, remaining = marker_expr(remaining) + while remaining: + m = AND.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_expr(remaining) + lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + def marker(remaining): + lhs, remaining = marker_and(remaining) + while remaining: + m = OR.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_and(remaining) + lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + return marker(marker_string) + + +def parse_requirement(req): + """ + Parse a requirement passed in as a string. Return a Container + whose attributes contain the various parts of the requirement. 
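+
+ For example (illustrative), parsing
+ 'foo[bar] (>= 1.0, < 2.0); python_version >= "3.6"' returns a Container
+ with name 'foo', extras ['bar'],
+ constraints [('>=', '1.0'), ('<', '2.0')] and a marker expression for
+ python_version >= "3.6".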
+ """ + remaining = req.strip() + if not remaining or remaining.startswith('#'): + return None + m = IDENTIFIER.match(remaining) + if not m: + raise SyntaxError('name expected: %s' % remaining) + distname = m.groups()[0] + remaining = remaining[m.end():] + extras = mark_expr = versions = uri = None + if remaining and remaining[0] == '[': + i = remaining.find(']', 1) + if i < 0: + raise SyntaxError('unterminated extra: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + extras = [] + while s: + m = IDENTIFIER.match(s) + if not m: + raise SyntaxError('malformed extra: %s' % s) + extras.append(m.groups()[0]) + s = s[m.end():] + if not s: + break + if s[0] != ',': + raise SyntaxError('comma expected in extras: %s' % s) + s = s[1:].lstrip() + if not extras: + extras = None + if remaining: + if remaining[0] == '@': + # it's a URI + remaining = remaining[1:].lstrip() + m = NON_SPACE.match(remaining) + if not m: + raise SyntaxError('invalid URI: %s' % remaining) + uri = m.groups()[0] + t = urlparse(uri) + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not (t.scheme and t.netloc): + raise SyntaxError('Invalid URL: %s' % uri) + remaining = remaining[m.end():].lstrip() + else: + + def get_versions(ver_remaining): + """ + Return a list of operator, version tuples if any are + specified, else None. + """ + m = COMPARE_OP.match(ver_remaining) + versions = None + if m: + versions = [] + while True: + op = m.groups()[0] + ver_remaining = ver_remaining[m.end():] + m = VERSION_IDENTIFIER.match(ver_remaining) + if not m: + raise SyntaxError('invalid version: %s' % ver_remaining) + v = m.groups()[0] + versions.append((op, v)) + ver_remaining = ver_remaining[m.end():] + if not ver_remaining or ver_remaining[0] != ',': + break + ver_remaining = ver_remaining[1:].lstrip() + # Some packages have a trailing comma which would break things + # See issue #148 + if not ver_remaining: + break + m = COMPARE_OP.match(ver_remaining) + if not m: + raise SyntaxError('invalid constraint: %s' % ver_remaining) + if not versions: + versions = None + return versions, ver_remaining + + if remaining[0] != '(': + versions, remaining = get_versions(remaining) + else: + i = remaining.find(')', 1) + if i < 0: + raise SyntaxError('unterminated parenthesis: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + # As a special diversion from PEP 508, allow a version number + # a.b.c in parentheses as a synonym for ~= a.b.c (because this + # is allowed in earlier PEPs) + if COMPARE_OP.match(s): + versions, _ = get_versions(s) + else: + m = VERSION_IDENTIFIER.match(s) + if not m: + raise SyntaxError('invalid constraint: %s' % s) + v = m.groups()[0] + s = s[m.end():].lstrip() + if s: + raise SyntaxError('invalid constraint: %s' % s) + versions = [('~=', v)] + + if remaining: + if remaining[0] != ';': + raise SyntaxError('invalid requirement: %s' % remaining) + remaining = remaining[1:].lstrip() + + mark_expr, remaining = parse_marker(remaining) + + if remaining and remaining[0] != '#': + raise SyntaxError('unexpected trailing data: %s' % remaining) + + if not versions: + rs = distname + else: + rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + return Container(name=distname, extras=extras, constraints=versions, + marker=mark_expr, url=uri, requirement=rs) + + +def 
get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(root, path): + # normalizes and returns a lstripped-/-separated path + root = root.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(root) + return path[len(root):].lstrip('/') + + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): +# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as +# changes to the stub launcher mean that sys.executable always points +# to the stub on OS X +# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' +# in os.environ): +# result = os.environ['__PYVENV_LAUNCHER__'] +# else: +# result = sys.executable +# return result + # Avoid normcasing: see issue #143 + # result = os.path.normcase(sys.executable) + result = sys.executable + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + #entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) 
+ for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + def __init__(self, func): + self.func = func + #for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + #obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + +def convert_path(pathname): + """Return 'pathname' as a name that will work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". + """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. 
+ """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + if os.path.exists(path): + os.remove(path) + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.write_binary_file(path, data.encode(encoding)) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. + for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + Commit recorded 
changes, turn off recording, return
+ changes.
+ """
+ assert self.record
+ result = self.files_written, self.dirs_created
+ self._init_record()
+ return result
+
+ def rollback(self):
+ if not self.dry_run:
+ for f in list(self.files_written):
+ if os.path.exists(f):
+ os.remove(f)
+ # dirs should all be empty now, except perhaps for
+ # __pycache__ subdirs
+ # reverse so that subdirs appear before their parents
+ dirs = sorted(self.dirs_created, reverse=True)
+ for d in dirs:
+ flist = os.listdir(d)
+ if flist:
+ assert flist == ['__pycache__']
+ sd = os.path.join(d, flist[0])
+ os.rmdir(sd)
+ os.rmdir(d) # should fail if non-empty
+ self._init_record()
+
+def resolve(module_name, dotted_path):
+ if module_name in sys.modules:
+ mod = sys.modules[module_name]
+ else:
+ mod = __import__(module_name)
+ if dotted_path is None:
+ result = mod
+ else:
+ parts = dotted_path.split('.')
+ result = getattr(mod, parts.pop(0))
+ for p in parts:
+ result = getattr(result, p)
+ return result
+
+
+class ExportEntry(object):
+ def __init__(self, name, prefix, suffix, flags):
+ self.name = name
+ self.prefix = prefix
+ self.suffix = suffix
+ self.flags = flags
+
+ @cached_property
+ def value(self):
+ return resolve(self.prefix, self.suffix)
+
+ def __repr__(self): # pragma: no cover
+ return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
+ self.suffix, self.flags)
+
+ def __eq__(self, other):
+ if not isinstance(other, ExportEntry):
+ result = False
+ else:
+ result = (self.name == other.name and
+ self.prefix == other.prefix and
+ self.suffix == other.suffix and
+ self.flags == other.flags)
+ return result
+
+ __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
+ \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+ \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+ ''', re.VERBOSE)
+
+def get_export_entry(specification):
+ m = ENTRY_RE.search(specification)
+ if not m:
+ result = None
+ if '[' in specification or ']' in specification:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ else:
+ d = m.groupdict()
+ name = d['name']
+ path = d['callable']
+ colons = path.count(':')
+ if colons == 0:
+ prefix, suffix = path, None
+ else:
+ if colons != 1:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ prefix, suffix = path.split(':')
+ flags = d['flags']
+ if flags is None:
+ if '[' in specification or ']' in specification:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ flags = []
+ else:
+ flags = [f.strip() for f in flags.split(',')]
+ result = ExportEntry(name, prefix, suffix, flags)
+ return result
+
+
+def get_cache_base(suffix=None):
+ """
+ Return the default base location for distlib caches. If the directory does
+ not exist, it is created. Use the suffix provided for the base directory,
+ and default to '.distlib' if it isn't provided.
+
+ On Windows, if LOCALAPPDATA is defined in the environment, then it is
+ assumed to be a directory, and will be the parent directory of the result.
+ On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
+ directory - using os.expanduser('~') - will be the parent directory of
+ the result.
+
+ The result is just the directory '.distlib' in the parent directory as
+ determined above, or with the name specified with ``suffix``.
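+
+ For example (illustrative), on a POSIX system with no ``suffix`` argument
+ the result is typically os.path.join(os.path.expanduser('~'), '.distlib').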
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. + """ + d, p = os.path.splitdrive(os.path.abspath(path)) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.rsplit('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + filename = unquote(filename).replace(' ', '-') + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' + r'\(\s*(?P[^\s)]+)\)$') + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result +# +# Extended metadata functionality +# + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + #data = reader.read().decode('utf-8') + #result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). + :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. 
+ """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', + event, args, kwargs, result) + return result + +# +# Simple sequencing +# +class Sequencer(object): + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or + step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node],index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', + '.tgz', '.tbz', '.whl') + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G','T','P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
+ #elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + #import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + #import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
+ if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + +if ssl: + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, + CertificateError) + + +# +# HTTPSConnection which verifies certificates/matches domains +# + + class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + if not hasattr(ssl, 'SSLContext'): + # For 2.x + if self.ca_certs: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, + cert_reqs=cert_reqs, + ssl_version=ssl.PROTOCOL_SSLv23, + ca_certs=self.ca_certs) + else: # pragma: no cover + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 + if self.cert_file: + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: # pragma: no cover + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + class HTTPSHandler(BaseHTTPSHandler): + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + + # + # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- + # Middle proxy using HTTP listens on port 443, or an index mistakenly serves + # HTML containing a http://xyz link when it should be https://xyz), + # you can use the following handler class, which does not allow HTTP traffic. + # + # It works by inheriting from HTTPHandler - so build_opener won't add a + # handler for HTTP itself. 
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + +# +# XML-RPC with timeouts +# + +_ver_info = sys.version_info[:2] + +if _ver_info == (2, 6): + class HTTP(httplib.HTTP): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + + if ssl: + class HTTPS(httplib.HTTPS): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + +class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if _ver_info == (2, 6): + result = HTTP(h, timeout=self.timeout) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + result = self._connection[1] + return result + +if ssl: + class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if _ver_info == (2, 6): + result = HTTPS(host, None, **kwargs) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + result = self._connection[1] + return result + + +class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + # scheme = splittype(uri) # deprecated as of Python 3.8 + scheme = urlparse(uri)[0] + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + # Python 3 determines encoding from locale. 
Force 'utf-8' + # file encoding to match other forced utf-8 encoding + kwargs['encoding'] = 'utf-8' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + +class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + +# +# Configurator functionality +# + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + + +class SubprocessMixin(object): + """ + Mixin for running subprocesses and capturing their output + """ + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() + +# def _get_pypirc_command(): + # """ + # Get the distutils command for interacting with PyPI configurations. + # :return: the command. + # """ + # from distutils.core import Distribution + # from distutils.config import PyPIRCCommand + # d = Distribution() + # return PyPIRCCommand(d) + +class PyPIRCFile(object): + + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' + DEFAULT_REALM = 'pypi' + + def __init__(self, fn=None, url=None): + if fn is None: + fn = os.path.join(os.path.expanduser('~'), '.pypirc') + self.filename = fn + self.url = url + + def read(self): + result = {} + + if os.path.exists(self.filename): + repository = self.url or self.DEFAULT_REPOSITORY + + config = configparser.RawConfigParser() + config.read(self.filename) + sections = config.sections() + if 'distutils' in sections: + # let's get the list of servers + index_servers = config.get('distutils', 'index-servers') + _servers = [server.strip() for server in + index_servers.split('\n') + if server.strip() != ''] + if _servers == []: + # nothing set, let's try to get the default pypi + if 'pypi' in sections: + _servers = ['pypi'] + else: + for server in _servers: + result = {'server': server} + result['username'] = config.get(server, 'username') + + # optional params + for key, default in (('repository', self.DEFAULT_REPOSITORY), + ('realm', self.DEFAULT_REALM), + ('password', None)): + if config.has_option(server, key): + result[key] = config.get(server, key) + else: + result[key] = default + + # work around people having "repository" for the "pypi" + # section of their config set to the HTTP (rather than + # HTTPS) URL + if (server == 'pypi' and + repository in (self.DEFAULT_REPOSITORY, 'pypi')): + result['repository'] = self.DEFAULT_REPOSITORY + elif (result['server'] != repository and + result['repository'] != repository): + result = {} + elif 'server-login' in sections: + # old format + server = 'server-login' + if config.has_option(server, 'repository'): + repository = config.get(server, 'repository') + else: + repository = self.DEFAULT_REPOSITORY + result = { + 'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM + } + return result + + def update(self, username, password): + # import pdb; pdb.set_trace() + config = configparser.RawConfigParser() + fn = self.filename + config.read(fn) + if not config.has_section('pypi'): + config.add_section('pypi') + config.set('pypi', 'username', username) + config.set('pypi', 'password', password) + with open(fn, 'w') as f: + 
config.write(f) + +def _load_pypirc(index): + """ + Read the PyPI access configuration as supported by distutils. + """ + return PyPIRCFile(url=index.url).read() + +def _store_pypirc(index): + PyPIRCFile().update(index.username, index.password) + +# +# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor +# tweaks +# + +def get_host_platform(): + """Return a string that identifies the current platform. This is used mainly to + distinguish platform-specific build directories and platform-specific built + distributions. Typically includes the OS name and version and the + architecture (as supplied by 'os.uname()'), although the exact information + included depends on the OS; eg. on Linux, the kernel version isn't + particularly important. + + Examples of returned values: + linux-i586 + linux-alpha (?) + solaris-2.6-sun4u + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + + """ + if os.name == 'nt': + if 'amd64' in sys.version.lower(): + return 'win-amd64' + if '(arm)' in sys.version.lower(): + return 'win-arm32' + if '(arm64)' in sys.version.lower(): + return 'win-arm64' + return sys.platform + + # Set for cross builds explicitly + if "_PYTHON_HOST_PLATFORM" in os.environ: + return os.environ["_PYTHON_HOST_PLATFORM"] + + if os.name != 'posix' or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + + (osname, host, release, version, machine) = os.uname() + + # Convert the OS name to lowercase, remove '/' characters, and translate + # spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_').replace('/', '-') + + if osname[:5] == 'linux': + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + + elif osname[:5] == 'sunos': + if release[0] >= '5': # SunOS 5 == Solaris 2 + osname = 'solaris' + release = '%d.%s' % (int(release[0]) - 3, release[2:]) + # We can't use 'platform.architecture()[0]' because a + # bootstrap problem. We use a dict to get an error + # if some suspicious happens. 
+ bitness = {2147483647:'32bit', 9223372036854775807:'64bit'} + machine += '.%s' % bitness[sys.maxsize] + # fall through to standard osname-release-machine representation + elif osname[:3] == 'aix': + from _aix_support import aix_platform + return aix_platform() + elif osname[:6] == 'cygwin': + osname = 'cygwin' + rel_re = re.compile (r'[\d.]+', re.ASCII) + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == 'darwin': + import _osx_support, distutils.sysconfig + osname, release, machine = _osx_support.get_platform_osx( + distutils.sysconfig.get_config_vars(), + osname, release, machine) + + return '%s-%s-%s' % (osname, release, machine) + + +_TARGET_TO_PLAT = { + 'x86' : 'win32', + 'x64' : 'win-amd64', + 'arm' : 'win-arm32', +} + + +def get_platform(): + if os.name != 'nt': + return get_host_platform() + cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') + if cross_compilation_target not in _TARGET_TO_PLAT: + return get_host_platform() + return _TARGET_TO_PLAT[cross_compilation_target] diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/version.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/version.py new file mode 100644 index 0000000..c7c8bb6 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/version.py @@ -0,0 +1,739 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Implementation of a flexible versioning scheme providing support for PEP-440, +setuptools-compatible and semantic versioning. +""" + +import logging +import re + +from .compat import string_types +from .util import parse_requirement + +__all__ = ['NormalizedVersion', 'NormalizedMatcher', + 'LegacyVersion', 'LegacyMatcher', + 'SemanticVersion', 'SemanticMatcher', + 'UnsupportedVersionError', 'get_scheme'] + +logger = logging.getLogger(__name__) + + +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" + pass + + +class Version(object): + def __init__(self, s): + self._string = s = s.strip() + self._parts = parts = self.parse(s) + assert isinstance(parts, tuple) + assert len(parts) > 0 + + def parse(self, s): + raise NotImplementedError('please implement in a subclass') + + def _check_compatible(self, other): + if type(self) != type(other): + raise TypeError('cannot compare %r and %r' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + self._check_compatible(other) + return self._parts < other._parts + + def __gt__(self, other): + return not (self.__lt__(other) or self.__eq__(other)) + + def __le__(self, other): + return self.__lt__(other) or self.__eq__(other) + + def __ge__(self, other): + return self.__gt__(other) or self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self._parts) + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + @property + def is_prerelease(self): + raise NotImplementedError('Please implement in subclasses.') + + +class Matcher(object): + version_class = None + + # value is either a callable or the name of a method + _operators = { + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, c, p: v == c or v < c, + '>=': lambda v, c, p: v == c or v > c, + '==': lambda v, c, p: 
v == c, + '===': lambda v, c, p: v == c, + # by default, compatible => >=. + '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, + } + + # this is a method only to support alternative implementations + # via overriding + def parse_requirement(self, s): + return parse_requirement(s) + + def __init__(self, s): + if self.version_class is None: + raise ValueError('Please specify a version class') + self._string = s = s.strip() + r = self.parse_requirement(s) + if not r: + raise ValueError('Not valid: %r' % s) + self.name = r.name + self.key = self.name.lower() # for case-insensitive comparisons + clist = [] + if r.constraints: + # import pdb; pdb.set_trace() + for op, s in r.constraints: + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) + self._parts = tuple(clist) + + def match(self, version): + """ + Check if the provided version matches the constraints. + + :param version: The version to match against this instance. + :type version: String or :class:`Version` instance. + """ + if isinstance(version, string_types): + version = self.version_class(version) + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): + return False + return True + + @property + def exact_version(self): + result = None + if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): + result = self._parts[0][1] + return result + + def _check_compatible(self, other): + if type(self) != type(other) or self.name != other.name: + raise TypeError('cannot compare %s and %s' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self.key == other.key and self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self.key) + hash(self._parts) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' + r'(\.(post)(\d+))?(\.(dev)(\d+))?' 
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') + + +def _pep_440_key(s): + s = s.strip() + m = PEP440_VERSION_RE.match(s) + if not m: + raise UnsupportedVersionError('Not a valid version: %s' % s) + groups = m.groups() + nums = tuple(int(v) for v in groups[1].split('.')) + while len(nums) > 1 and nums[-1] == 0: + nums = nums[:-1] + + if not groups[0]: + epoch = 0 + else: + epoch = int(groups[0][:-1]) + pre = groups[4:6] + post = groups[7:9] + dev = groups[10:12] + local = groups[13] + if pre == (None, None): + pre = () + else: + pre = pre[0], int(pre[1]) + if post == (None, None): + post = () + else: + post = post[0], int(post[1]) + if dev == (None, None): + dev = () + else: + dev = dev[0], int(dev[1]) + if local is None: + local = () + else: + parts = [] + for part in local.split('.'): + # to ensure that numeric compares as > lexicographic, avoid + # comparing them directly, but encode a tuple which ensures + # correct sorting + if part.isdigit(): + part = (1, int(part)) + else: + part = (0, part) + parts.append(part) + local = tuple(parts) + if not pre: + # either before pre-release, or final release and after + if not post and dev: + # before pre-release + pre = ('a', -1) # to sort before a0 + else: + pre = ('z',) # to sort after all pre-releases + # now look at the state of post and dev. + if not post: + post = ('_',) # sort before 'a' + if not dev: + dev = ('final',) + + #print('%s -> %s' % (s, m.groups())) + return epoch, nums, pre, post, dev, local + + +_normalized_key = _pep_440_key + + +class NormalizedVersion(Version): + """A rational version. + + Good: + 1.2 # equivalent to "1.2.0" + 1.2.0 + 1.2a1 + 1.2.3a2 + 1.2.3b1 + 1.2.3c1 + 1.2.3.4 + TODO: fill this out + + Bad: + 1 # minimum two numbers + 1.2a # release level must have a release serial + 1.2.3b + """ + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP440_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[1].split('.')) + return result + + PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) + + @property + def is_prerelease(self): + return any(t[0] in self.PREREL_TAGS for t in self._parts if t) + + +def _match_prefix(x, y): + x = str(x) + y = str(y) + if x == y: + return True + if not x.startswith(y): + return False + n = len(y) + return x[n] == '.' + + +class NormalizedMatcher(Matcher): + version_class = NormalizedVersion + + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '===': '_match_arbitrary', + '!=': '_match_ne', + } + + def _adjust_local(self, version, constraint, prefix): + if prefix: + strip_local = '+' not in constraint and version._parts[-1] + else: + # both constraint and version are + # NormalizedVersion instances. + # If constraint does not have a local component, + # ensure the version doesn't, either. 
+ strip_local = not constraint._parts[-1] and version._parts[-1] + if strip_local: + s = version._string.split('+', 1)[0] + version = self.version_class(s) + return version, constraint + + def _match_lt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version >= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_gt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version <= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_le(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version <= constraint + + def _match_ge(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version >= constraint + + def _match_eq(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version == constraint) + else: + result = _match_prefix(version, constraint) + return result + + def _match_arbitrary(self, version, constraint, prefix): + return str(version) == str(constraint) + + def _match_ne(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version != constraint) + else: + result = not _match_prefix(version, constraint) + return result + + def _match_compatible(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version == constraint: + return True + if version < constraint: + return False +# if not prefix: +# return True + release_clause = constraint._release_clause + if len(release_clause) > 1: + release_clause = release_clause[:-1] + pfx = '.'.join([str(i) for i in release_clause]) + return _match_prefix(version, pfx) + +_REPLACEMENTS = ( + (re.compile('[.+-]$'), ''), # remove trailing puncts + (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start + (re.compile('^[.-]'), ''), # remove leading puncts + (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses + (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha + (re.compile(r'\b(pre-alpha|prealpha)\b'), + 'pre.alpha'), # standardise + (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses +) + +_SUFFIX_REPLACEMENTS = ( + (re.compile('^[:~._+-]+'), ''), # remove leading puncts + (re.compile('[,*")([\\]]'), ''), # remove unwanted chars + (re.compile('[~:+_ -]'), '.'), # replace illegal chars + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\.$'), ''), # trailing '.' +) + +_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') + + +def _suggest_semantic_version(s): + """ + Try to suggest a semantic form for a version for which + _suggest_normalized_version couldn't come up with anything. + """ + result = s.strip().lower() + for pat, repl in _REPLACEMENTS: + result = pat.sub(repl, result) + if not result: + result = '0.0.0' + + # Now look for numeric prefix, and separate it out from + # the rest. 
+ #import pdb; pdb.set_trace() + m = _NUMERIC_PREFIX.match(result) + if not m: + prefix = '0.0.0' + suffix = result + else: + prefix = m.groups()[0].split('.') + prefix = [int(i) for i in prefix] + while len(prefix) < 3: + prefix.append(0) + if len(prefix) == 3: + suffix = result[m.end():] + else: + suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] + prefix = prefix[:3] + prefix = '.'.join([str(i) for i in prefix]) + suffix = suffix.strip() + if suffix: + #import pdb; pdb.set_trace() + # massage the suffix. + for pat, repl in _SUFFIX_REPLACEMENTS: + suffix = pat.sub(repl, suffix) + + if not suffix: + result = prefix + else: + sep = '-' if 'dev' in suffix else '+' + result = prefix + sep + suffix + if not is_semver(result): + result = None + return result + + +def _suggest_normalized_version(s): + """Suggest a normalized version close to the given version string. + + If you have a version string that isn't rational (i.e. NormalizedVersion + doesn't like it) then you might be able to get an equivalent (or close) + rational version from this function. + + This does a number of simple normalizations to the given string, based + on observation of versions currently in use on PyPI. Given a dump of + those version during PyCon 2009, 4287 of them: + - 2312 (53.93%) match NormalizedVersion without change + with the automatic suggestion + - 3474 (81.04%) match when using this suggestion method + + @param s {str} An irrational version string. + @returns A rational version string, or None, if couldn't determine one. + """ + try: + _normalized_key(s) + return s # already rational + except UnsupportedVersionError: + pass + + rs = s.lower() + + # part of this could use maketrans + for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), + ('beta', 'b'), ('rc', 'c'), ('-final', ''), + ('-pre', 'c'), + ('-release', ''), ('.release', ''), ('-stable', ''), + ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), + ('final', '')): + rs = rs.replace(orig, repl) + + # if something ends with dev or pre, we add a 0 + rs = re.sub(r"pre$", r"pre0", rs) + rs = re.sub(r"dev$", r"dev0", rs) + + # if we have something like "b-2" or "a.2" at the end of the + # version, that is probably beta, alpha, etc + # let's remove the dash or dot + rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) + + # 1.0-dev-r371 -> 1.0.dev371 + # 0.1-dev-r79 -> 0.1.dev79 + rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) + + # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 + rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) + + # Clean: v0.3, v1.0 + if rs.startswith('v'): + rs = rs[1:] + + # Clean leading '0's on numbers. + #TODO: unintended side-effect on, e.g., "2003.05.09" + # PyPI stats: 77 (~2%) better + rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) + + # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers + # zero. 
+ # PyPI stats: 245 (7.56%) better + rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) + + # the 'dev-rNNN' tag is a dev tag + rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) + + # clean the - when used as a pre delimiter + rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) + + # a terminal "dev" or "devel" can be changed into ".dev0" + rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) + + # a terminal "dev" can be changed into ".dev0" + rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) + + # a terminal "final" or "stable" can be removed + rs = re.sub(r"(final|stable)$", "", rs) + + # The 'r' and the '-' tags are post release tags + # 0.4a1.r10 -> 0.4a1.post10 + # 0.9.33-17222 -> 0.9.33.post17222 + # 0.9.33-r17222 -> 0.9.33.post17222 + rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) + + # Clean 'r' instead of 'dev' usage: + # 0.9.33+r17222 -> 0.9.33.dev17222 + # 1.0dev123 -> 1.0.dev123 + # 1.0.git123 -> 1.0.dev123 + # 1.0.bzr123 -> 1.0.dev123 + # 0.1a0dev.123 -> 0.1a0.dev123 + # PyPI stats: ~150 (~4%) better + rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) + + # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: + # 0.2.pre1 -> 0.2c1 + # 0.2-c1 -> 0.2c1 + # 1.0preview123 -> 1.0c123 + # PyPI stats: ~21 (0.62%) better + rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) + + # Tcl/Tk uses "px" for their post release markers + rs = re.sub(r"p(\d+)$", r".post\1", rs) + + try: + _normalized_key(rs) + except UnsupportedVersionError: + rs = None + return rs + +# +# Legacy version processing (distribute-compatible) +# + +_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) +_VERSION_REPLACE = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + '': None, + '.': None, +} + + +def _legacy_key(s): + def get_parts(s): + result = [] + for p in _VERSION_PART.split(s.lower()): + p = _VERSION_REPLACE.get(p, p) + if p: + if '0' <= p[:1] <= '9': + p = p.zfill(8) + else: + p = '*' + p + result.append(p) + result.append('*final') + return result + + result = [] + for p in get_parts(s): + if p.startswith('*'): + if p < '*final': + while result and result[-1] == '*final-': + result.pop() + while result and result[-1] == '00000000': + result.pop() + result.append(p) + return tuple(result) + + +class LegacyVersion(Version): + def parse(self, s): + return _legacy_key(s) + + @property + def is_prerelease(self): + result = False + for x in self._parts: + if (isinstance(x, string_types) and x.startswith('*') and + x < '*final'): + result = True + break + return result + + +class LegacyMatcher(Matcher): + version_class = LegacyVersion + + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile(r'^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + +# +# Semantic versioning +# + +_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' + r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) + + +def is_semver(s): + return _SEMVER_RE.match(s) + + +def _semantic_key(s): + def make_tuple(s, absent): + if s is None: + result = (absent,) + else: + parts = s[1:].split('.') + # We can't compare ints and strings on Python 3, so fudge it + # by zero-filling numeric values so simulate a numeric comparison + result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) + return result + + m = is_semver(s) + if not m: + raise UnsupportedVersionError(s) + groups = m.groups() + major, minor, patch = [int(i) for i in groups[:3]] + # choose the '|' and '*' so that versions sort correctly + pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') + return (major, minor, patch), pre, build + + +class SemanticVersion(Version): + def parse(self, s): + return _semantic_key(s) + + @property + def is_prerelease(self): + return self._parts[1][0] != '|' + + +class SemanticMatcher(Matcher): + version_class = SemanticVersion + + +class VersionScheme(object): + def __init__(self, key, matcher, suggester=None): + self.key = key + self.matcher = matcher + self.suggester = suggester + + def is_valid_version(self, s): + try: + self.matcher.version_class(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_matcher(self, s): + try: + self.matcher(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_constraint_list(self, s): + """ + Used for processing some metadata fields + """ + # See issue #140. Be tolerant of a single trailing comma. + if s.endswith(','): + s = s[:-1] + return self.is_valid_matcher('dummy_name (%s)' % s) + + def suggest(self, s): + if self.suggester is None: + result = None + else: + result = self.suggester(s) + return result + +_SCHEMES = { + 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, + _suggest_normalized_version), + 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s), + 'semantic': VersionScheme(_semantic_key, SemanticMatcher, + _suggest_semantic_version), +} + +_SCHEMES['default'] = _SCHEMES['normalized'] + + +def get_scheme(name): + if name not in _SCHEMES: + raise ValueError('unknown scheme name: %r' % name) + return _SCHEMES[name] diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/wheel.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/wheel.py new file mode 100644 index 0000000..48abfde --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distlib/wheel.py @@ -0,0 +1,1053 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2020 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import base64 +import codecs +import datetime +from email import message_from_file +import hashlib +import imp +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import zipfile + +from . 
import __version__, DistlibException
+from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
+from .database import InstalledDistribution
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+                       LEGACY_METADATA_FILENAME)
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+                   cached_property, get_cache_base, read_exports, tempdir,
+                   get_platform)
+from .version import NormalizedVersion, UnsupportedVersionError
+
+logger = logging.getLogger(__name__)
+
+cache = None    # created when needed
+
+if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
+    IMP_PREFIX = 'pp'
+elif sys.platform.startswith('java'):  # pragma: no cover
+    IMP_PREFIX = 'jy'
+elif sys.platform == 'cli':  # pragma: no cover
+    IMP_PREFIX = 'ip'
+else:
+    IMP_PREFIX = 'cp'
+
+VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
+if not VER_SUFFIX:  # pragma: no cover
+    VER_SUFFIX = '%s%s' % sys.version_info[:2]
+PYVER = 'py' + VER_SUFFIX
+IMPVER = IMP_PREFIX + VER_SUFFIX
+
+ARCH = get_platform().replace('-', '_').replace('.', '_')
+
+ABI = sysconfig.get_config_var('SOABI')
+if ABI and ABI.startswith('cpython-'):
+    ABI = ABI.replace('cpython-', 'cp').split('-')[0]
+else:
+    def _derive_abi():
+        parts = ['cp', VER_SUFFIX]
+        if sysconfig.get_config_var('Py_DEBUG'):
+            parts.append('d')
+        if sysconfig.get_config_var('WITH_PYMALLOC'):
+            parts.append('m')
+        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
+            parts.append('u')
+        return ''.join(parts)
+    ABI = _derive_abi()
+    del _derive_abi
+
+FILENAME_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?
+-(?P<py>\w+\d+(\.\w+\d+)*)
+-(?P<bi>\w+)
+-(?P<ar>\w+(\.\w+)*)
+\.whl$
+''', re.IGNORECASE | re.VERBOSE)
+
+NAME_VERSION_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?$
+''', re.IGNORECASE | re.VERBOSE)
+
+SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
+SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
+SHEBANG_PYTHON = b'#!python'
+SHEBANG_PYTHONW = b'#!pythonw'
+
+if os.sep == '/':
+    to_posix = lambda o: o
+else:
+    to_posix = lambda o: o.replace(os.sep, '/')
+
+
+class Mounter(object):
+    def __init__(self):
+        self.impure_wheels = {}
+        self.libs = {}
+
+    def add(self, pathname, extensions):
+        self.impure_wheels[pathname] = extensions
+        self.libs.update(extensions)
+
+    def remove(self, pathname):
+        extensions = self.impure_wheels.pop(pathname)
+        for k, v in extensions:
+            if k in self.libs:
+                del self.libs[k]
+
+    def find_module(self, fullname, path=None):
+        if fullname in self.libs:
+            result = self
+        else:
+            result = None
+        return result
+
+    def load_module(self, fullname):
+        if fullname in sys.modules:
+            result = sys.modules[fullname]
+        else:
+            if fullname not in self.libs:
+                raise ImportError('unable to find extension for %s' % fullname)
+            result = imp.load_dynamic(fullname, self.libs[fullname])
+            result.__loader__ = self
+            parts = fullname.rsplit('.', 1)
+            if len(parts) > 1:
+                result.__package__ = parts[0]
+        return result
+
+_hook = Mounter()
+
+
+class Wheel(object):
+    """
+    Class to build and install from Wheel files (PEP 427).
+    """
+
+    wheel_version = (1, 1)
+    hash_kind = 'sha256'
+
+    def __init__(self, filename=None, sign=False, verify=False):
+        """
+        Initialise an instance using a (valid) filename.
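+
+        For example (an illustrative value, not taken from the original
+        documentation), a filename such as 'foo-1.0-py3-none-any.whl' would
+        yield name 'foo', version '1.0', pyver ['py3'], abi ['none'] and
+        arch ['any'].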
+ """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. + """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, + pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + wheel_metadata = self.get_wheel_metadata(zf) + wv = wheel_metadata['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] + result = None + for fn in fns: + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + if result: + break + except KeyError: + pass + if not result: + raise ValueError('Invalid wheel, because metadata is ' + 'missing: looked in %s' % ', '.join(fns)) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) 
+ if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, base): + records = list(records) # make a copy, as mutated + p = to_posix(os.path.relpath(record_path, base)) + records.append((p, '', '')) + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + hasher = getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + self.write_record(records, p, libdir) + ap = to_posix(os.path.join(info_dir, 'RECORD')) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. 
+ path = libdir + distinfo = None + for root, dirs, files in os.walk(path): + if root == path: + # At the top level only, save distinfo for later + # and skip it for now + for i, dn in enumerate(dirs): + dn = fsdecode(dn) + if dn.endswith('.dist-info'): + distinfo = os.path.join(root, dn) + del dirs[i] + break + assert distinfo, '.dist-info directory expected, not found' + + for fn in files: + # comment out next suite to leave .pyc files in + if fsdecode(fn).endswith(('.pyc', '.pyo')): + continue + p = os.path.join(root, fn) + rp = to_posix(os.path.relpath(p, path)) + archive_paths.append((rp, p)) + + # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. + files = os.listdir(distinfo) + for fn in files: + if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): + p = fsdecode(os.path.join(distinfo, fn)) + ap = to_posix(os.path.join(info_dir, fn)) + archive_paths.append((ap, p)) + + wheel_metadata = [ + 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), + 'Generator: distlib %s' % __version__, + 'Root-Is-Purelib: %s' % is_pure, + ] + for pyver, abi, arch in self.tags: + wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) + p = os.path.join(distinfo, 'WHEEL') + with open(p, 'w') as f: + f.write('\n'.join(wheel_metadata)) + ap = to_posix(os.path.join(info_dir, 'WHEEL')) + archive_paths.append((ap, p)) + + # sort the entries by archive path. Not needed by any spec, but it + # keeps the archive listing and RECORD tidier than they would otherwise + # be. Use the number of path segments to keep directory entries together, + # and keep the dist-info stuff at the end. + def sorter(t): + ap = t[0] + n = ap.count('/') + if '.dist-info' in ap: + n += 10000 + return (n, ap) + archive_paths = sorted(archive_paths, key=sorter) + + # Now, at last, RECORD. + # Paths in here are archive paths - nothing else makes sense. + self.write_records((distinfo, info_dir), libdir, archive_paths) + # Now, ready to build the zip file + pathname = os.path.join(self.dirname, self.filename) + self.build_zip(pathname, archive_paths) + return pathname + + def skip_entry(self, arcname): + """ + Determine whether an archive entry should be skipped when verifying + or installing. + """ + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + # We also skip directories, as they won't be in RECORD + # either. See: + # + # https://github.com/pypa/wheel/issues/294 + # https://github.com/pypa/wheel/issues/287 + # https://github.com/pypa/wheel/pull/289 + # + return arcname.endswith(('/', '/RECORD.jws')) + + def install(self, paths, maker, **kwargs): + """ + Install a wheel to the specified paths. If kwarg ``warner`` is + specified, it should be a callable, which will be called with two + tuples indicating the wheel version of this software and the wheel + version in the file, if there is a discrepancy in the versions. + This can be used to issue any warnings to raise any exceptions. + If kwarg ``lib_only`` is True, only the purelib/platlib files are + installed, and the headers, scripts, data and dist-info metadata are + not written. If kwarg ``bytecode_hashed_invalidation`` is True, written + bytecode will try to use file-hash based invalidation (PEP-552) on + supported interpreter versions (CPython 2.7+). + + The return value is a :class:`InstalledDistribution` instance unless + ``options.lib_only`` is True, in which case the return value is ``None``. 
+ """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! + + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. + if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + # Issue #147: permission bits aren't preserved. Using + # zf.extract(zinfo, libdir) should have worked, but didn't, + # see https://www.thetopsites.net/article/53834422.shtml + # So ... 
manually preserve permission bits as given in zinfo + if os.name == 'posix': + # just set the normal permission bits + os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile, + hashed_invalidation=bc_hashed_invalidation) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. + commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' [%s]' % ','.join(v.flags) + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True } + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if cache is None: + # Use native string 
to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + '%s.%s' % sys.version_info[:2]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. + """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' 
% pathname + raise DistlibException(msg) + if pathname in sys.path: + logger.debug('%s already in path', pathname) + else: + if append: + sys.path.append(pathname) + else: + sys.path.insert(0, pathname) + extensions = self._get_extensions() + if extensions: + if _hook not in sys.meta_path: + sys.meta_path.append(_hook) + _hook.add(pathname, extensions) + + def unmount(self): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if pathname not in sys.path: + logger.debug('%s not in path', pathname) + else: + sys.path.remove(pathname) + if pathname in _hook.impure_wheels: + _hook.remove(pathname) + if not _hook.impure_wheels: + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + def verify(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # TODO version verification + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + # See issue #115: some wheels have .. in their entries, but + # in the filename ... e.g. __main__..py ! So the check is + # updated to look for .. in the directory portions + p = u_arcname.split('/') + if '..' in p: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + def update(self, modifier, dest_dir=None, **kwargs): + """ + Update the contents of a wheel in a generic way. The modifier should + be a callable which expects a dictionary argument: its keys are + archive-entry paths, and its values are absolute filesystem paths + where the contents the corresponding archive entries can be found. The + modifier is free to change the contents of the files pointed to, add + new entries and remove entries, before returning. This method will + extract the entire contents of the wheel to a temporary location, call + the modifier, and then use the passed (and possibly updated) + dictionary to write a new wheel. If ``dest_dir`` is specified, the new + wheel is written there -- otherwise, the original wheel is overwritten. + + The modifier should return True if it updated the wheel, else False. + This method returns the same value the modifier returns. 
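+
+        A minimal modifier might look like this sketch (illustrative only, not
+        part of the original documentation; the wheel filename is assumed):
+
+            def strip_pyc(path_map):
+                # drop any stray .pyc entries before the wheel is rebuilt
+                removed = False
+                for arcname in list(path_map):
+                    if arcname.endswith('.pyc'):
+                        del path_map[arcname]
+                        removed = True
+                return removed
+
+            Wheel('foo-1.0-py3-none-any.whl').update(strip_pyc)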
+ """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + v = NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], + '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = path.endswith(LEGACY_METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, + updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + +def _get_glibc_version(): + import platform + ver = platform.libc_ver() + result = [] + if ver[0] == 'glibc': + for s in ver[1].split('.'): + result.append(int(s) if s.isdigit() else 0) + result = tuple(result) + return result + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. 
+ """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, - 1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + # manylinux + if abi != 'none' and sys.platform.startswith('linux'): + arch = arch.replace('linux_', '') + parts = _get_glibc_version() + if len(parts) == 2: + if parts >= (2, 5): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux1_%s' % arch)) + if parts >= (2, 12): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux2010_%s' % arch)) + if parts >= (2, 17): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux2014_%s' % arch)) + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux_%s_%s_%s' % (parts[0], parts[1], + arch))) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/distro.py b/.venv/lib/python3.9/site-packages/pip/_vendor/distro.py new file mode 100644 index 0000000..7892741 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/distro.py @@ -0,0 +1,1386 @@ +# Copyright 2015,2016,2017 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The ``distro`` package (``distro`` stands for Linux Distribution) provides +information about the Linux distribution it runs on, such as a reliable +machine-readable distro ID, or version information. + +It is the recommended replacement for Python's original +:py:func:`platform.linux_distribution` function, but it provides much more +functionality. An alternative implementation became necessary because Python +3.5 deprecated this function, and Python 3.8 removed it altogether. Its +predecessor function :py:func:`platform.dist` was already deprecated since +Python 2.6 and removed in Python 3.8. Still, there are many cases in which +access to OS distribution information is needed. See `Python issue 1322 +`_ for more information. +""" + +import argparse +import json +import logging +import os +import re +import shlex +import subprocess +import sys +import warnings + +__version__ = "1.6.0" + +# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2 +# support, can use typing.TYPE_CHECKING instead. See: +# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING +if False: # pragma: nocover + from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Sequence, + TextIO, + Tuple, + Type, + TypedDict, + Union, + ) + + VersionDict = TypedDict( + "VersionDict", {"major": str, "minor": str, "build_number": str} + ) + InfoDict = TypedDict( + "InfoDict", + { + "id": str, + "version": str, + "version_parts": VersionDict, + "like": str, + "codename": str, + }, + ) + + +_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") +_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") +_OS_RELEASE_BASENAME = "os-release" + +#: Translation table for normalizing the "ID" attribute defined in os-release +#: files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as defined in the os-release file, translated to lower case, +#: with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_OS_ID = { + "ol": "oracle", # Oracle Linux +} + +#: Translation table for normalizing the "Distributor ID" attribute returned by +#: the lsb_release command, for use by the :func:`distro.id` method. +#: +#: * Key: Value as returned by the lsb_release command, translated to lower +#: case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_LSB_ID = { + "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 + "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 + "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation + "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server + "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode +} + +#: Translation table for normalizing the distro ID derived from the file name +#: of distro release files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as derived from the file name of a distro release file, +#: translated to lower case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_DISTRO_ID = { + "redhat": "rhel", # RHEL 6.x, 7.x +} + +# Pattern for content of distro release file (reversed) +_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( + r"(?:[^)]*\)(.*)\()? 
*(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" +) + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + "debian_version", + "lsb-release", + "oem-release", + _OS_RELEASE_BASENAME, + "system-release", + "plesk-release", + "iredmail-release", +) + + +def linux_distribution(full_distribution_name=True): + # type: (bool) -> Tuple[str, str, str] + """ + .. deprecated:: 1.6.0 + + :func:`distro.linux_distribution()` is deprecated. It should only be + used as a compatibility shim with Python's + :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, + :func:`distro.version` and :func:`distro.name` instead. + + Return information about the current OS distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. Otherwise, the result of :func:`distro.name`. + + * ``version``: The result of :func:`distro.version`. + + * ``codename``: The result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the OS distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular OS distributions. + """ + warnings.warn( + "distro.linux_distribution() is deprecated. It should only be used as a " + "compatibility shim with Python's platform.linux_distribution(). Please use " + "distro.id(), distro.version() and distro.name() instead.", + DeprecationWarning, + stacklevel=2, + ) + return _distro.linux_distribution(full_distribution_name) + + +def id(): + # type: () -> str + """ + Return the distro ID of the current distribution, as a + machine-readable string. + + For a number of OS distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. 
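A short sketch of how these accessors replace the removed platform.linux_distribution() call (the import assumes pip's vendored copy of distro; the printed values depend on the host system)::

    from pip._vendor import distro

    print(distro.id())        # e.g. 'ubuntu' or 'rhel' -- normalized, machine-readable
    print(distro.name())      # e.g. 'Ubuntu'
    print(distro.version())   # e.g. '20.04'

    # The deprecated shim is still available for old callers and emits a
    # DeprecationWarning, e.g. ('ubuntu', '20.04', 'focal'):
    print(distro.linux_distribution(full_distribution_name=False))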
+ + This package maintains the following reliable distro ID values: + + ============== ========================================= + Distro ID Distribution + ============== ========================================= + "ubuntu" Ubuntu + "debian" Debian + "rhel" RedHat Enterprise Linux + "centos" CentOS + "fedora" Fedora + "sles" SUSE Linux Enterprise Server + "opensuse" openSUSE + "amazon" Amazon Linux + "arch" Arch Linux + "cloudlinux" CloudLinux OS + "exherbo" Exherbo Linux + "gentoo" GenToo Linux + "ibm_powerkvm" IBM PowerKVM + "kvmibm" KVM for IBM z Systems + "linuxmint" Linux Mint + "mageia" Mageia + "mandriva" Mandriva Linux + "parallels" Parallels + "pidora" Pidora + "raspbian" Raspbian + "oracle" Oracle Linux (and Oracle Enterprise Linux) + "scientific" Scientific Linux + "slackware" Slackware + "xenserver" XenServer + "openbsd" OpenBSD + "netbsd" NetBSD + "freebsd" FreeBSD + "midnightbsd" MidnightBSD + ============== ========================================= + + If you have a need to get distros for reliable IDs added into this set, + or if you find that the :func:`distro.id` function returns a different + distro ID for one of the listed distros, please create an issue in the + `distro issue tracker`_. + + **Lookup hierarchy and transformations:** + + First, the ID is obtained from the following sources, in the specified + order. The first available and non-empty value is used: + + * the value of the "ID" attribute of the os-release file, + + * the value of the "Distributor ID" attribute returned by the lsb_release + command, + + * the first part of the file name of the distro release file, + + The so determined ID value then passes the following transformations, + before it is returned by this method: + + * it is translated to lower case, + + * blanks (which should not be there anyway) are translated to underscores, + + * a normalization of the ID is performed, based upon + `normalization tables`_. The purpose of this normalization is to ensure + that the ID is as reliable as possible, even across incompatible changes + in the OS distributions. A common reason for an incompatible change is + the addition of an os-release file, or the addition of the lsb_release + command, with ID values that differ from what was previously determined + from the distro release file name. + """ + return _distro.id() + + +def name(pretty=False): + # type: (bool) -> str + """ + Return the name of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the name is returned without version or codename. + (e.g. "CentOS Linux") + + If *pretty* is true, the version and codename are appended. + (e.g. "CentOS Linux 7.1.1503 (Core)") + + **Lookup hierarchy:** + + The name is obtained from the following sources, in the specified order. + The first available and non-empty value is used: + + * If *pretty* is false: + + - the value of the "NAME" attribute of the os-release file, + + - the value of the "Distributor ID" attribute returned by the lsb_release + command, + + - the value of the "" field of the distro release file. + + * If *pretty* is true: + + - the value of the "PRETTY_NAME" attribute of the os-release file, + + - the value of the "Description" attribute returned by the lsb_release + command, + + - the value of the "" field of the distro release file, appended + with the value of the pretty version ("" and "" + fields) of the distro release file, if available. 
+ """ + return _distro.name(pretty) + + +def version(pretty=False, best=False): + # type: (bool, bool) -> str + """ + Return the version of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the version is returned without codename (e.g. + "7.0"). + + If *pretty* is true, the codename in parenthesis is appended, if the + codename is non-empty (e.g. "7.0 (Maipo)"). + + Some distributions provide version numbers with different precisions in + the different sources of distribution information. Examining the different + sources in a fixed priority order does not always yield the most precise + version (e.g. for Debian 8.2, or CentOS 7.1). + + The *best* parameter can be used to control the approach for the returned + version: + + If *best* is false, the first non-empty version number in priority order of + the examined sources is returned. + + If *best* is true, the most precise version number out of all examined + sources is returned. + + **Lookup hierarchy:** + + In all cases, the version number is obtained from the following sources. + If *best* is false, this order represents the priority order: + + * the value of the "VERSION_ID" attribute of the os-release file, + * the value of the "Release" attribute returned by the lsb_release + command, + * the version number parsed from the "" field of the first line + of the distro release file, + * the version number parsed from the "PRETTY_NAME" attribute of the + os-release file, if it follows the format of the distro release files. + * the version number parsed from the "Description" attribute returned by + the lsb_release command, if it follows the format of the distro release + files. + """ + return _distro.version(pretty, best) + + +def version_parts(best=False): + # type: (bool) -> Tuple[str, str, str] + """ + Return the version of the current OS distribution as a tuple + ``(major, minor, build_number)`` with items as follows: + + * ``major``: The result of :func:`distro.major_version`. + + * ``minor``: The result of :func:`distro.minor_version`. + + * ``build_number``: The result of :func:`distro.build_number`. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.version_parts(best) + + +def major_version(best=False): + # type: (bool) -> str + """ + Return the major version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The major version is the first + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.major_version(best) + + +def minor_version(best=False): + # type: (bool) -> str + """ + Return the minor version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The minor version is the second + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.minor_version(best) + + +def build_number(best=False): + # type: (bool) -> str + """ + Return the build number of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The build number is the third part + of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. 
+ """ + return _distro.build_number(best) + + +def like(): + # type: () -> str + """ + Return a space-separated list of distro IDs of distributions that are + closely related to the current OS distribution in regards to packaging + and programming interfaces, for example distributions the current + distribution is a derivative from. + + **Lookup hierarchy:** + + This information item is only provided by the os-release file. + For details, see the description of the "ID_LIKE" attribute in the + `os-release man page + `_. + """ + return _distro.like() + + +def codename(): + # type: () -> str + """ + Return the codename for the release of the current OS distribution, + as a string. + + If the distribution does not have a codename, an empty string is returned. + + Note that the returned codename is not always really a codename. For + example, openSUSE returns "x86_64". This function does not handle such + cases in any special way and just returns the string it finds, if any. + + **Lookup hierarchy:** + + * the codename within the "VERSION" attribute of the os-release file, if + provided, + + * the value of the "Codename" attribute returned by the lsb_release + command, + + * the value of the "" field of the distro release file. + """ + return _distro.codename() + + +def info(pretty=False, best=False): + # type: (bool, bool) -> InfoDict + """ + Return certain machine-readable information items about the current OS + distribution in a dictionary, as shown in the following example: + + .. sourcecode:: python + + { + 'id': 'rhel', + 'version': '7.0', + 'version_parts': { + 'major': '7', + 'minor': '0', + 'build_number': '' + }, + 'like': 'fedora', + 'codename': 'Maipo' + } + + The dictionary structure and keys are always the same, regardless of which + information items are available in the underlying data sources. The values + for the various keys are as follows: + + * ``id``: The result of :func:`distro.id`. + + * ``version``: The result of :func:`distro.version`. + + * ``version_parts -> major``: The result of :func:`distro.major_version`. + + * ``version_parts -> minor``: The result of :func:`distro.minor_version`. + + * ``version_parts -> build_number``: The result of + :func:`distro.build_number`. + + * ``like``: The result of :func:`distro.like`. + + * ``codename``: The result of :func:`distro.codename`. + + For a description of the *pretty* and *best* parameters, see the + :func:`distro.version` method. + """ + return _distro.info(pretty, best) + + +def os_release_info(): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information items + from the os-release file data source of the current OS distribution. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_info() + + +def lsb_release_info(): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information items + from the lsb_release command data source of the current OS distribution. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_info() + + +def distro_release_info(): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + + See `distro release file`_ for details about these information items. 
+ """ + return _distro.distro_release_info() + + +def uname_info(): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + """ + return _distro.uname_info() + + +def os_release_attr(attribute): + # type: (str) -> str + """ + Return a single named information item from the os-release file data source + of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_attr(attribute) + + +def lsb_release_attr(attribute): + # type: (str) -> str + """ + Return a single named information item from the lsb_release command output + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_attr(attribute) + + +def distro_release_attr(attribute): + # type: (str) -> str + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_attr(attribute) + + +def uname_attr(attribute): + # type: (str) -> str + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + """ + return _distro.uname_attr(attribute) + + +try: + from functools import cached_property +except ImportError: + # Python < 3.8 + class cached_property(object): # type: ignore + """A version of @property which caches the value. On access, it calls the + underlying function and sets the value in `__dict__` so future accesses + will not re-call the property. + """ + + def __init__(self, f): + # type: (Callable[[Any], Any]) -> None + self._fname = f.__name__ + self._f = f + + def __get__(self, obj, owner): + # type: (Any, Type[Any]) -> Any + assert obj is not None, "call {} on an instance".format(self._fname) + ret = obj.__dict__[self._fname] = self._f(obj) + return ret + + +class LinuxDistribution(object): + """ + Provides information about a OS distribution. + + This package creates a private module-global instance of this class with + default initialization arguments, that is used by the + `consolidated accessor functions`_ and `single source accessor functions`_. + By using default initialization arguments, that module-global instance + returns data about the current OS distribution (i.e. the distro this + package runs on). + + Normally, it is not necessary to create additional instances of this class. 
+ However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. + """ + + def __init__( + self, + include_lsb=True, + os_release_file="", + distro_release_file="", + include_uname=True, + root_dir=None, + ): + # type: (bool, str, str, bool, Optional[str]) -> None + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). + + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + * ``include_uname`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + + * ``root_dir`` (string): The absolute path to the root directory to use + to find distro-related information files. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. + This controls whether the lsb information will be loaded. + + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. + + Raises: + + * :py:exc:`IOError`: Some I/O issue with an os-release file or distro + release file. + + * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had + some issue (other than not being available in the program execution + path). + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. 
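A sketch of the kind of non-default instantiation this initializer supports, e.g. inspecting a mounted image instead of the running system (the mount point is hypothetical; the pattern mirrors the module's own command-line handling further below)::

    from pip._vendor.distro import LinuxDistribution

    # Read os-release / *-release files from a chroot-style tree rather than /,
    # and skip the lsb_release and uname subprocess calls entirely.
    dist = LinuxDistribution(
        include_lsb=False,
        include_uname=False,
        root_dir='/mnt/image',   # hypothetical mount point
    )
    print(dist.id(), dist.version(), dist.codename())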
+ """ + self.root_dir = root_dir + self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR + self.usr_lib_dir = ( + os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR + ) + + if os_release_file: + self.os_release_file = os_release_file + else: + etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) + usr_lib_os_release_file = os.path.join( + self.usr_lib_dir, _OS_RELEASE_BASENAME + ) + + # NOTE: The idea is to respect order **and** have it set + # at all times for API backwards compatibility. + if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( + usr_lib_os_release_file + ): + self.os_release_file = etc_dir_os_release_file + else: + self.os_release_file = usr_lib_os_release_file + + self.distro_release_file = distro_release_file or "" # updated later + self.include_lsb = include_lsb + self.include_uname = include_uname + + def __repr__(self): + # type: () -> str + """Return repr of all info""" + return ( + "LinuxDistribution(" + "os_release_file={self.os_release_file!r}, " + "distro_release_file={self.distro_release_file!r}, " + "include_lsb={self.include_lsb!r}, " + "include_uname={self.include_uname!r}, " + "_os_release_info={self._os_release_info!r}, " + "_lsb_release_info={self._lsb_release_info!r}, " + "_distro_release_info={self._distro_release_info!r}, " + "_uname_info={self._uname_info!r})".format(self=self) + ) + + def linux_distribution(self, full_distribution_name=True): + # type: (bool) -> Tuple[str, str, str] + """ + Return information about the OS distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self.codename(), + ) + + def id(self): + # type: () -> str + """Return the distro ID of the OS distribution, as a string. + + For details, see :func:`distro.id`. + """ + + def normalize(distro_id, table): + # type: (str, Dict[str, str]) -> str + distro_id = distro_id.lower().replace(" ", "_") + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr("distributor_id") + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + distro_id = self.uname_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return "" + + def name(self, pretty=False): + # type: (bool) -> str + """ + Return the name of the OS distribution, as a string. + + For details, see :func:`distro.name`. + """ + name = ( + self.os_release_attr("name") + or self.lsb_release_attr("distributor_id") + or self.distro_release_attr("name") + or self.uname_attr("name") + ) + if pretty: + name = self.os_release_attr("pretty_name") or self.lsb_release_attr( + "description" + ) + if not name: + name = self.distro_release_attr("name") or self.uname_attr("name") + version = self.version(pretty=True) + if version: + name = name + " " + version + return name or "" + + def version(self, pretty=False, best=False): + # type: (bool, bool) -> str + """ + Return the version of the OS distribution, as a string. + + For details, see :func:`distro.version`. 
+ """ + versions = [ + self.os_release_attr("version_id"), + self.lsb_release_attr("release"), + self.distro_release_attr("version_id"), + self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( + "version_id", "" + ), + self._parse_distro_release_content( + self.lsb_release_attr("description") + ).get("version_id", ""), + self.uname_attr("release"), + ] + version = "" + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. + for v in versions: + if v.count(".") > version.count(".") or version == "": + version = v + else: + for v in versions: + if v != "": + version = v + break + if pretty and version and self.codename(): + version = "{0} ({1})".format(version, self.codename()) + return version + + def version_parts(self, best=False): + # type: (bool) -> Tuple[str, str, str] + """ + Return the version of the OS distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or "", build_number or "" + return "", "", "" + + def major_version(self, best=False): + # type: (bool) -> str + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best=False): + # type: (bool) -> str + """ + Return the minor version number of the current distribution. + + For details, see :func:`distro.minor_version`. + """ + return self.version_parts(best)[1] + + def build_number(self, best=False): + # type: (bool) -> str + """ + Return the build number of the current distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self): + # type: () -> str + """ + Return the IDs of distributions that are like the OS distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr("id_like") or "" + + def codename(self): + # type: () -> str + """ + Return the codename of the OS distribution. + + For details, see :func:`distro.codename`. + """ + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info["codename"] + except KeyError: + return ( + self.lsb_release_attr("codename") + or self.distro_release_attr("codename") + or "" + ) + + def info(self, pretty=False, best=False): + # type: (bool, bool) -> InfoDict + """ + Return certain machine-readable information about the OS + distribution. + + For details, see :func:`distro.info`. + """ + return dict( + id=self.id(), + version=self.version(pretty, best), + version_parts=dict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best), + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the OS distribution. + + For details, see :func:`distro.os_release_info`. 
+ """ + return self._os_release_info + + def lsb_release_info(self): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information + items from the lsb_release command data source of the OS + distribution. + + For details, see :func:`distro.lsb_release_info`. + """ + return self._lsb_release_info + + def distro_release_info(self): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information + items from the distro release file data source of the OS + distribution. + + For details, see :func:`distro.distro_release_info`. + """ + return self._distro_release_info + + def uname_info(self): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information + items from the uname command data source of the OS distribution. + + For details, see :func:`distro.uname_info`. + """ + return self._uname_info + + def os_release_attr(self, attribute): + # type: (str) -> str + """ + Return a single named information item from the os-release file data + source of the OS distribution. + + For details, see :func:`distro.os_release_attr`. + """ + return self._os_release_info.get(attribute, "") + + def lsb_release_attr(self, attribute): + # type: (str) -> str + """ + Return a single named information item from the lsb_release command + output data source of the OS distribution. + + For details, see :func:`distro.lsb_release_attr`. + """ + return self._lsb_release_info.get(attribute, "") + + def distro_release_attr(self, attribute): + # type: (str) -> str + """ + Return a single named information item from the distro release file + data source of the OS distribution. + + For details, see :func:`distro.distro_release_attr`. + """ + return self._distro_release_info.get(attribute, "") + + def uname_attr(self, attribute): + # type: (str) -> str + """ + Return a single named information item from the uname command + output data source of the OS distribution. + + For details, see :func:`distro.uname_attr`. + """ + return self._uname_info.get(attribute, "") + + @cached_property + def _os_release_info(self): + # type: () -> Dict[str, str] + """ + Get the information items from the specified os-release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(self.os_release_file): + with open(self.os_release_file) as release_file: + return self._parse_os_release_content(release_file) + return {} + + @staticmethod + def _parse_os_release_content(lines): + # type: (TextIO) -> Dict[str, str] + """ + Parse the lines of an os-release file. + + Parameters: + + * lines: Iterable through the lines in the os-release file. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + lexer = shlex.shlex(lines, posix=True) + lexer.whitespace_split = True + + # The shlex module defines its `wordchars` variable using literals, + # making it dependent on the encoding of the Python source file. + # In Python 2.6 and 2.7, the shlex source file is encoded in + # 'iso-8859-1', and the `wordchars` variable is defined as a byte + # string. This causes a UnicodeDecodeError to be raised when the + # parsed content is a unicode object. The following fix resolves that + # (... 
but it should be fixed in shlex...): + if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): + lexer.wordchars = lexer.wordchars.decode("iso-8859-1") + + tokens = list(lexer) + for token in tokens: + # At this point, all shell-like parsing has been done (i.e. + # comments processed, quotes and backslash escape sequences + # processed, multi-line values assembled, trailing newlines + # stripped, etc.), so the tokens are now either: + # * variable assignments: var=value + # * commands or their arguments (not allowed in os-release) + if "=" in token: + k, v = token.split("=", 1) + props[k.lower()] = v + else: + # Ignore any tokens that are not variable assignments + pass + + if "version_codename" in props: + # os-release added a version_codename field. Use that in + # preference to anything else Note that some distros purposefully + # do not have code names. They should be setting + # version_codename="" + props["codename"] = props["version_codename"] + elif "ubuntu_codename" in props: + # Same as above but a non-standard field name used on older Ubuntus + props["codename"] = props["ubuntu_codename"] + elif "version" in props: + # If there is no version_codename, parse it from the version + match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"]) + if match: + codename = match.group() + codename = codename.strip("()") + codename = codename.strip(",") + codename = codename.strip() + # codename appears within paranthese. + props["codename"] = codename + + return props + + @cached_property + def _lsb_release_info(self): + # type: () -> Dict[str, str] + """ + Get the information items from the lsb_release command output. + + Returns: + A dictionary containing all information items. + """ + if not self.include_lsb: + return {} + with open(os.devnull, "wb") as devnull: + try: + cmd = ("lsb_release", "-a") + stdout = subprocess.check_output(cmd, stderr=devnull) + # Command not found or lsb_release returned error + except (OSError, subprocess.CalledProcessError): + return {} + content = self._to_str(stdout).splitlines() + return self._parse_lsb_release_content(content) + + @staticmethod + def _parse_lsb_release_content(lines): + # type: (Iterable[str]) -> Dict[str, str] + """ + Parse the output of the lsb_release command. + + Parameters: + + * lines: Iterable through the lines of the lsb_release output. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + for line in lines: + kv = line.strip("\n").split(":", 1) + if len(kv) != 2: + # Ignore lines without colon. + continue + k, v = kv + props.update({k.replace(" ", "_").lower(): v.strip()}) + return props + + @cached_property + def _uname_info(self): + # type: () -> Dict[str, str] + with open(os.devnull, "wb") as devnull: + try: + cmd = ("uname", "-rs") + stdout = subprocess.check_output(cmd, stderr=devnull) + except OSError: + return {} + content = self._to_str(stdout).splitlines() + return self._parse_uname_content(content) + + @staticmethod + def _parse_uname_content(lines): + # type: (Sequence[str]) -> Dict[str, str] + props = {} + match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) + if match: + name, version = match.groups() + + # This is to prevent the Linux kernel version from + # appearing as the 'best' version on otherwise + # identifiable distributions. 
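For reference, a tiny sketch of what the os-release parsing described above yields for a typical file (the sample content is made up; _parse_os_release_content is private and is called directly here only to illustrate its behaviour)::

    import io
    from pip._vendor.distro import LinuxDistribution

    sample = io.StringIO(
        'NAME="Ubuntu"\n'
        'VERSION="20.04.6 LTS (Focal Fossa)"\n'
        'ID=ubuntu\n'
        'VERSION_CODENAME=focal\n'
    )
    props = LinuxDistribution._parse_os_release_content(sample)
    print(props['id'])        # 'ubuntu'
    print(props['codename'])  # 'focal', taken from VERSION_CODENAME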
+ if name == "Linux": + return {} + props["id"] = name.lower() + props["name"] = name + props["release"] = version + return props + + @staticmethod + def _to_str(text): + # type: (Union[bytes, str]) -> str + encoding = sys.getfilesystemencoding() + encoding = "utf-8" if encoding == "ascii" else encoding + + if sys.version_info[0] >= 3: + if isinstance(text, bytes): + return text.decode(encoding) + else: + if isinstance(text, unicode): # noqa + return text.encode(encoding) + + return text + + @cached_property + def _distro_release_info(self): + # type: () -> Dict[str, str] + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file(self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if "name" in distro_info and "cloudlinux" in distro_info["name"].lower(): + distro_info["id"] = "cloudlinux" + elif match: + distro_info["id"] = match.group(1) + return distro_info + else: + try: + basenames = os.listdir(self.etc_dir) + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + except OSError: + # This may occur when /etc is not readable but we can't be + # sure about the *-release files. Check common entries of + # /etc for information. If they turn out to not be there the + # error is handled in `_parse_distro_release_file()`. + basenames = [ + "SuSE-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "sl-release", + "slackware-version", + ] + for basename in basenames: + if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: + continue + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match: + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + if "name" in distro_info: + # The name is always present if the pattern matches + self.distro_release_file = filepath + distro_info["id"] = match.group(1) + if "cloudlinux" in distro_info["name"].lower(): + distro_info["id"] = "cloudlinux" + return distro_info + return {} + + def _parse_distro_release_file(self, filepath): + # type: (str) -> Dict[str, str] + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + try: + with open(filepath) as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. We don't want them... + return self._parse_distro_release_content(fp.readline()) + except (OSError, IOError): + # Ignore not being able to read a specific, seemingly version + # related file. 
+ # See https://github.com/python-distro/distro/issues/162 + return {} + + @staticmethod + def _parse_distro_release_content(line): + # type: (str) -> Dict[str, str] + """ + Parse a line from a distro release file. + + Parameters: + * line: Line from the distro release file. Must be a unicode string + or a UTF-8 encoded byte string. + + Returns: + A dictionary containing all information items. + """ + matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) + distro_info = {} + if matches: + # regexp ensures non-None + distro_info["name"] = matches.group(3)[::-1] + if matches.group(2): + distro_info["version_id"] = matches.group(2)[::-1] + if matches.group(1): + distro_info["codename"] = matches.group(1)[::-1] + elif line: + distro_info["name"] = line.strip() + return distro_info + + +_distro = LinuxDistribution() + + +def main(): + # type: () -> None + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.StreamHandler(sys.stdout)) + + parser = argparse.ArgumentParser(description="OS distro info tool") + parser.add_argument( + "--json", "-j", help="Output in machine readable format", action="store_true" + ) + + parser.add_argument( + "--root-dir", + "-r", + type=str, + dest="root_dir", + help="Path to the root filesystem directory (defaults to /)", + ) + + args = parser.parse_args() + + if args.root_dir: + dist = LinuxDistribution( + include_lsb=False, include_uname=False, root_dir=args.root_dir + ) + else: + dist = _distro + + if args.json: + logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) + else: + logger.info("Name: %s", dist.name(pretty=True)) + distribution_version = dist.version(pretty=True) + logger.info("Version: %s", distribution_version) + distribution_codename = dist.codename() + logger.info("Codename: %s", distribution_codename) + + +if __name__ == "__main__": + main() diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__init__.py new file mode 100644 index 0000000..d1d82f1 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__init__.py @@ -0,0 +1,35 @@ +""" +HTML parsing library based on the `WHATWG HTML specification +`_. The parser is designed to be compatible with +existing HTML found in the wild and implements well-defined error recovery that +is largely compatible with modern desktop web browsers. + +Example usage:: + + from pip._vendor import html5lib + with open("my_document.html", "rb") as f: + tree = html5lib.parse(f) + +For convenience, this module re-exports the following names: + +* :func:`~.html5parser.parse` +* :func:`~.html5parser.parseFragment` +* :class:`~.html5parser.HTMLParser` +* :func:`~.treebuilders.getTreeBuilder` +* :func:`~.treewalkers.getTreeWalker` +* :func:`~.serializer.serialize` +""" + +from __future__ import absolute_import, division, unicode_literals + +from .html5parser import HTMLParser, parse, parseFragment +from .treebuilders import getTreeBuilder +from .treewalkers import getTreeWalker +from .serializer import serialize + +__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", + "getTreeWalker", "serialize"] + +# this has to be at the top level, see how setup.py parses this +#: Distribution version number. 
+__version__ = "1.1" diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..fb7f3f0 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc new file mode 100644 index 0000000..138b266 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc new file mode 100644 index 0000000..bb9a9bd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc new file mode 100644 index 0000000..d86591e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc new file mode 100644 index 0000000..9d3ea51 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc new file mode 100644 index 0000000..3881516 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc new file mode 100644 index 0000000..f2b957d Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc new file mode 100644 index 0000000..37da98b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_ihatexml.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_ihatexml.py new file mode 100644 index 0000000..3ff803c --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_ihatexml.py @@ -0,0 +1,289 @@ +from __future__ import absolute_import, division, unicode_literals + +import re +import warnings + +from .constants import DataLossWarning + +baseChar = """ +[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | +[#x00F8-#x00FF] | 
[#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] | +[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] | +[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 | +[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] | +[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] | +[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] | +[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] | +[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 | +[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] | +[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] | +[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D | +[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] | +[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] | +[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] | +[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] | +[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] | +[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] | +[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 | +[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] | +[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] | +[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] | +[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] | +[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] | +[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] | +[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] | +[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] | +[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] | +[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] | +[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A | +#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 | +#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] | +#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] | +[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] | +[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C | +#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 | +[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] | +[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] | +[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 | +[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] | +[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B | +#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE | +[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] | +[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 | +[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] | +[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]""" + +ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]""" + +combiningCharacter = """ +[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] | +[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 | +[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] | +[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] | +#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | 
[#x0962-#x0963] | +[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] | +[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 | +#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] | +[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC | +[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] | +#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] | +[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] | +[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] | +[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] | +[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] | +[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] | +#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 | +[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] | +#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] | +[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] | +[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] | +#x3099 | #x309A""" + +digit = """ +[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] | +[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] | +[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] | +[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]""" + +extender = """ +#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 | +#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]""" + +letter = " | ".join([baseChar, ideographic]) + +# Without the +name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter, + extender]) +nameFirst = " | ".join([letter, "_"]) + +reChar = re.compile(r"#x([\d|A-F]{4,4})") +reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]") + + +def charStringToList(chars): + charRanges = [item.strip() for item in chars.split(" | ")] + rv = [] + for item in charRanges: + foundMatch = False + for regexp in (reChar, reCharRange): + match = regexp.match(item) + if match is not None: + rv.append([hexToInt(item) for item in match.groups()]) + if len(rv[-1]) == 1: + rv[-1] = rv[-1] * 2 + foundMatch = True + break + if not foundMatch: + assert len(item) == 1 + + rv.append([ord(item)] * 2) + rv = normaliseCharList(rv) + return rv + + +def normaliseCharList(charList): + charList = sorted(charList) + for item in charList: + assert item[1] >= item[0] + rv = [] + i = 0 + while i < len(charList): + j = 1 + rv.append(charList[i]) + while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1: + rv[-1][1] = charList[i + j][1] + j += 1 + i += j + return rv + + +# We don't really support characters above the BMP :( +max_unicode = int("FFFF", 16) + + +def missingRanges(charList): + rv = [] + if charList[0] != 0: + rv.append([0, charList[0][0] - 1]) + for i, item in enumerate(charList[:-1]): + rv.append([item[1] + 1, charList[i + 1][0] - 1]) + if charList[-1][1] != max_unicode: + rv.append([charList[-1][1] + 1, max_unicode]) + return rv + + +def listToRegexpStr(charList): + rv = [] + for item in charList: + if item[0] == item[1]: + rv.append(escapeRegexp(chr(item[0]))) + else: + rv.append(escapeRegexp(chr(item[0])) + "-" + + escapeRegexp(chr(item[1]))) + return "[%s]" % "".join(rv) + + +def hexToInt(hex_str): + return int(hex_str, 16) + + +def escapeRegexp(string): + specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}", + "[", "]", "|", "(", ")", "-") + for char in specialCharacters: + string 
= string.replace(char, "\\" + char) + + return string + +# output from the above +nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa + +nonXmlNameFirstBMPRegexp = 
re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa + +# Simpler things +nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\\-'()+,./:=?;!*#@$_%]") + + +class InfosetFilter(object): + replacementRegexp = re.compile(r"U[\dA-F]{5,5}") + + def __init__(self, + dropXmlnsLocalName=False, + dropXmlnsAttrNs=False, + preventDoubleDashComments=False, + preventDashAtCommentEnd=False, + replaceFormFeedCharacters=True, + preventSingleQuotePubid=False): + + self.dropXmlnsLocalName = dropXmlnsLocalName + self.dropXmlnsAttrNs = dropXmlnsAttrNs + + self.preventDoubleDashComments = preventDoubleDashComments + self.preventDashAtCommentEnd = preventDashAtCommentEnd + + self.replaceFormFeedCharacters = replaceFormFeedCharacters + + self.preventSingleQuotePubid = preventSingleQuotePubid + + self.replaceCache = {} + + def coerceAttribute(self, name, namespace=None): + if self.dropXmlnsLocalName and name.startswith("xmlns:"): + warnings.warn("Attributes cannot begin with xmlns", DataLossWarning) + return None + elif (self.dropXmlnsAttrNs and + namespace == "http://www.w3.org/2000/xmlns/"): + warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning) + return None + else: + return self.toXmlName(name) + + def coerceElement(self, name): + return self.toXmlName(name) + + def coerceComment(self, data): + if self.preventDoubleDashComments: + while "--" in data: + warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning) + data = data.replace("--", "- -") + if data.endswith("-"): + warnings.warn("Comments cannot end in 
a dash", DataLossWarning) + data += " " + return data + + def coerceCharacters(self, data): + if self.replaceFormFeedCharacters: + for _ in range(data.count("\x0C")): + warnings.warn("Text cannot contain U+000C", DataLossWarning) + data = data.replace("\x0C", " ") + # Other non-xml characters + return data + + def coercePubid(self, data): + dataOutput = data + for char in nonPubidCharRegexp.findall(data): + warnings.warn("Coercing non-XML pubid", DataLossWarning) + replacement = self.getReplacementCharacter(char) + dataOutput = dataOutput.replace(char, replacement) + if self.preventSingleQuotePubid and dataOutput.find("'") >= 0: + warnings.warn("Pubid cannot contain single quote", DataLossWarning) + dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'")) + return dataOutput + + def toXmlName(self, name): + nameFirst = name[0] + nameRest = name[1:] + m = nonXmlNameFirstBMPRegexp.match(nameFirst) + if m: + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) + nameFirstOutput = self.getReplacementCharacter(nameFirst) + else: + nameFirstOutput = nameFirst + + nameRestOutput = nameRest + replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) + for char in replaceChars: + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) + replacement = self.getReplacementCharacter(char) + nameRestOutput = nameRestOutput.replace(char, replacement) + return nameFirstOutput + nameRestOutput + + def getReplacementCharacter(self, char): + if char in self.replaceCache: + replacement = self.replaceCache[char] + else: + replacement = self.escapeChar(char) + return replacement + + def fromXmlName(self, name): + for item in set(self.replacementRegexp.findall(name)): + name = name.replace(item, self.unescapeChar(item)) + return name + + def escapeChar(self, char): + replacement = "U%05X" % ord(char) + self.replaceCache[char] = replacement + return replacement + + def unescapeChar(self, charcode): + return chr(int(charcode[1:], 16)) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_inputstream.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_inputstream.py new file mode 100644 index 0000000..e0bb376 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_inputstream.py @@ -0,0 +1,918 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import text_type +from pip._vendor.six.moves import http_client, urllib + +import codecs +import re +from io import BytesIO, StringIO + +from pip._vendor import webencodings + +from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase +from .constants import _ReparseException +from . 
import _utils + +# Non-unicode versions of constants for use in the pre-parser +spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) +asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) +asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase]) +spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"]) + + +invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa + +if _utils.supports_lone_surrogates: + # Use one extra step of indirection and create surrogates with + # eval. Not using this indirection would introduce an illegal + # unicode literal on platforms not supporting such lone + # surrogates. + assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1 + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] + + eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used + "]") +else: + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) + +non_bmp_invalid_codepoints = {0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, + 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, + 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, + 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, + 0x10FFFE, 0x10FFFF} + +ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]") + +# Cache for charsUntil() +charsUntilRegEx = {} + + +class BufferedStream(object): + """Buffering for streams that do not have buffering of their own + + The buffer is implemented as a list of chunks on the assumption that + joining many strings will be slow since it is O(n**2) + """ + + def __init__(self, stream): + self.stream = stream + self.buffer = [] + self.position = [-1, 0] # chunk number, offset + + def tell(self): + pos = 0 + for chunk in self.buffer[:self.position[0]]: + pos += len(chunk) + pos += self.position[1] + return pos + + def seek(self, pos): + assert pos <= self._bufferedBytes() + offset = pos + i = 0 + while len(self.buffer[i]) < offset: + offset -= len(self.buffer[i]) + i += 1 + self.position = [i, offset] + + def read(self, bytes): + if not self.buffer: + return self._readStream(bytes) + elif (self.position[0] == len(self.buffer) and + self.position[1] == len(self.buffer[-1])): + return self._readStream(bytes) + else: + return self._readFromBuffer(bytes) + + def _bufferedBytes(self): + return sum([len(item) for item in self.buffer]) + + def _readStream(self, bytes): + data = self.stream.read(bytes) + self.buffer.append(data) + self.position[0] += 1 + self.position[1] = len(data) + return data + + def _readFromBuffer(self, bytes): + remainingBytes = bytes + rv = [] + bufferIndex = self.position[0] + bufferOffset = self.position[1] + while bufferIndex < len(self.buffer) and remainingBytes != 0: + assert remainingBytes > 0 + bufferedData = self.buffer[bufferIndex] + + if remainingBytes <= len(bufferedData) - bufferOffset: + bytesToRead = remainingBytes + self.position = [bufferIndex, bufferOffset + bytesToRead] + else: + bytesToRead = len(bufferedData) - bufferOffset + 
self.position = [bufferIndex, len(bufferedData)] + bufferIndex += 1 + rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead]) + remainingBytes -= bytesToRead + + bufferOffset = 0 + + if remainingBytes: + rv.append(self._readStream(remainingBytes)) + + return b"".join(rv) + + +def HTMLInputStream(source, **kwargs): + # Work around Python bug #20007: read(0) closes the connection. + # http://bugs.python.org/issue20007 + if (isinstance(source, http_client.HTTPResponse) or + # Also check for addinfourl wrapping HTTPResponse + (isinstance(source, urllib.response.addbase) and + isinstance(source.fp, http_client.HTTPResponse))): + isUnicode = False + elif hasattr(source, "read"): + isUnicode = isinstance(source.read(0), text_type) + else: + isUnicode = isinstance(source, text_type) + + if isUnicode: + encodings = [x for x in kwargs if x.endswith("_encoding")] + if encodings: + raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings) + + return HTMLUnicodeInputStream(source, **kwargs) + else: + return HTMLBinaryInputStream(source, **kwargs) + + +class HTMLUnicodeInputStream(object): + """Provides a unicode stream of characters to the HTMLTokenizer. + + This class takes care of character encoding and removing or replacing + incorrect byte-sequences and also provides column and line tracking. + + """ + + _defaultChunkSize = 10240 + + def __init__(self, source): + """Initialises the HTMLInputStream. + + HTMLInputStream(source, [encoding]) -> Normalized stream from source + for use by html5lib. + + source can be either a file-object, local filename or a string. + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + """ + + if not _utils.supports_lone_surrogates: + # Such platforms will have already checked for such + # surrogate errors, so no need to do this checking. + self.reportCharacterErrors = None + elif len("\U0010FFFF") == 1: + self.reportCharacterErrors = self.characterErrorsUCS4 + else: + self.reportCharacterErrors = self.characterErrorsUCS2 + + # List of where new lines occur + self.newLines = [0] + + self.charEncoding = (lookupEncoding("utf-8"), "certain") + self.dataStream = self.openStream(source) + + self.reset() + + def reset(self): + self.chunk = "" + self.chunkSize = 0 + self.chunkOffset = 0 + self.errors = [] + + # number of (complete) lines in previous chunks + self.prevNumLines = 0 + # number of columns in the last line of the previous chunk + self.prevNumCols = 0 + + # Deal with CR LF and surrogates split over chunk boundaries + self._bufferedCharacter = None + + def openStream(self, source): + """Produces a file object from source. + + source can be either a file object, local filename or a string. 
+ + """ + # Already a file object + if hasattr(source, 'read'): + stream = source + else: + stream = StringIO(source) + + return stream + + def _position(self, offset): + chunk = self.chunk + nLines = chunk.count('\n', 0, offset) + positionLine = self.prevNumLines + nLines + lastLinePos = chunk.rfind('\n', 0, offset) + if lastLinePos == -1: + positionColumn = self.prevNumCols + offset + else: + positionColumn = offset - (lastLinePos + 1) + return (positionLine, positionColumn) + + def position(self): + """Returns (line, col) of the current position in the stream.""" + line, col = self._position(self.chunkOffset) + return (line + 1, col) + + def char(self): + """ Read one character from the stream or queue if available. Return + EOF when EOF is reached. + """ + # Read a new chunk from the input stream if necessary + if self.chunkOffset >= self.chunkSize: + if not self.readChunk(): + return EOF + + chunkOffset = self.chunkOffset + char = self.chunk[chunkOffset] + self.chunkOffset = chunkOffset + 1 + + return char + + def readChunk(self, chunkSize=None): + if chunkSize is None: + chunkSize = self._defaultChunkSize + + self.prevNumLines, self.prevNumCols = self._position(self.chunkSize) + + self.chunk = "" + self.chunkSize = 0 + self.chunkOffset = 0 + + data = self.dataStream.read(chunkSize) + + # Deal with CR LF and surrogates broken across chunks + if self._bufferedCharacter: + data = self._bufferedCharacter + data + self._bufferedCharacter = None + elif not data: + # We have no more data, bye-bye stream + return False + + if len(data) > 1: + lastv = ord(data[-1]) + if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF: + self._bufferedCharacter = data[-1] + data = data[:-1] + + if self.reportCharacterErrors: + self.reportCharacterErrors(data) + + # Replace invalid characters + data = data.replace("\r\n", "\n") + data = data.replace("\r", "\n") + + self.chunk = data + self.chunkSize = len(data) + + return True + + def characterErrorsUCS4(self, data): + for _ in range(len(invalid_unicode_re.findall(data))): + self.errors.append("invalid-codepoint") + + def characterErrorsUCS2(self, data): + # Someone picked the wrong compile option + # You lose + skip = False + for match in invalid_unicode_re.finditer(data): + if skip: + continue + codepoint = ord(match.group()) + pos = match.start() + # Pretty sure there should be endianness issues here + if _utils.isSurrogatePair(data[pos:pos + 2]): + # We have a surrogate pair! + char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2]) + if char_val in non_bmp_invalid_codepoints: + self.errors.append("invalid-codepoint") + skip = True + elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and + pos == len(data) - 1): + self.errors.append("invalid-codepoint") + else: + skip = False + self.errors.append("invalid-codepoint") + + def charsUntil(self, characters, opposite=False): + """ Returns a string of characters from the stream up to but not + including any character in 'characters' or EOF. 'characters' must be + a container that supports the 'in' method and iteration over its + characters. 
+ """ + + # Use a cache of regexps to find the required characters + try: + chars = charsUntilRegEx[(characters, opposite)] + except KeyError: + if __debug__: + for c in characters: + assert(ord(c) < 128) + regex = "".join(["\\x%02x" % ord(c) for c in characters]) + if not opposite: + regex = "^%s" % regex + chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex) + + rv = [] + + while True: + # Find the longest matching prefix + m = chars.match(self.chunk, self.chunkOffset) + if m is None: + # If nothing matched, and it wasn't because we ran out of chunk, + # then stop + if self.chunkOffset != self.chunkSize: + break + else: + end = m.end() + # If not the whole chunk matched, return everything + # up to the part that didn't match + if end != self.chunkSize: + rv.append(self.chunk[self.chunkOffset:end]) + self.chunkOffset = end + break + # If the whole remainder of the chunk matched, + # use it all and read the next chunk + rv.append(self.chunk[self.chunkOffset:]) + if not self.readChunk(): + # Reached EOF + break + + r = "".join(rv) + return r + + def unget(self, char): + # Only one character is allowed to be ungotten at once - it must + # be consumed again before any further call to unget + if char is not EOF: + if self.chunkOffset == 0: + # unget is called quite rarely, so it's a good idea to do + # more work here if it saves a bit of work in the frequently + # called char and charsUntil. + # So, just prepend the ungotten character onto the current + # chunk: + self.chunk = char + self.chunk + self.chunkSize += 1 + else: + self.chunkOffset -= 1 + assert self.chunk[self.chunkOffset] == char + + +class HTMLBinaryInputStream(HTMLUnicodeInputStream): + """Provides a unicode stream of characters to the HTMLTokenizer. + + This class takes care of character encoding and removing or replacing + incorrect byte-sequences and also provides column and line tracking. + + """ + + def __init__(self, source, override_encoding=None, transport_encoding=None, + same_origin_parent_encoding=None, likely_encoding=None, + default_encoding="windows-1252", useChardet=True): + """Initialises the HTMLInputStream. + + HTMLInputStream(source, [encoding]) -> Normalized stream from source + for use by html5lib. + + source can be either a file-object, local filename or a string. + + The optional encoding parameter must be a string that indicates + the encoding. 
If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + """ + # Raw Stream - for unicode objects this will encode to utf-8 and set + # self.charEncoding as appropriate + self.rawStream = self.openStream(source) + + HTMLUnicodeInputStream.__init__(self, self.rawStream) + + # Encoding Information + # Number of bytes to use when looking for a meta element with + # encoding information + self.numBytesMeta = 1024 + # Number of bytes to use when using detecting encoding using chardet + self.numBytesChardet = 100 + # Things from args + self.override_encoding = override_encoding + self.transport_encoding = transport_encoding + self.same_origin_parent_encoding = same_origin_parent_encoding + self.likely_encoding = likely_encoding + self.default_encoding = default_encoding + + # Determine encoding + self.charEncoding = self.determineEncoding(useChardet) + assert self.charEncoding[0] is not None + + # Call superclass + self.reset() + + def reset(self): + self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace') + HTMLUnicodeInputStream.reset(self) + + def openStream(self, source): + """Produces a file object from source. + + source can be either a file object, local filename or a string. + + """ + # Already a file object + if hasattr(source, 'read'): + stream = source + else: + stream = BytesIO(source) + + try: + stream.seek(stream.tell()) + except Exception: + stream = BufferedStream(stream) + + return stream + + def determineEncoding(self, chardet=True): + # BOMs take precedence over everything + # This will also read past the BOM if present + charEncoding = self.detectBOM(), "certain" + if charEncoding[0] is not None: + return charEncoding + + # If we've been overridden, we've been overridden + charEncoding = lookupEncoding(self.override_encoding), "certain" + if charEncoding[0] is not None: + return charEncoding + + # Now check the transport layer + charEncoding = lookupEncoding(self.transport_encoding), "certain" + if charEncoding[0] is not None: + return charEncoding + + # Look for meta elements with encoding information + charEncoding = self.detectEncodingMeta(), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Parent document encoding + charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative" + if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"): + return charEncoding + + # "likely" encoding + charEncoding = lookupEncoding(self.likely_encoding), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Guess with chardet, if available + if chardet: + try: + from pip._vendor.chardet.universaldetector import UniversalDetector + except ImportError: + pass + else: + buffers = [] + detector = UniversalDetector() + while not detector.done: + buffer = self.rawStream.read(self.numBytesChardet) + assert isinstance(buffer, bytes) + if not buffer: + break + buffers.append(buffer) + detector.feed(buffer) + detector.close() + encoding = lookupEncoding(detector.result['encoding']) + self.rawStream.seek(0) + if encoding is not None: + return encoding, "tentative" + + # Try the default encoding + charEncoding = lookupEncoding(self.default_encoding), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Fallback to html5lib's default if even that hasn't worked + return lookupEncoding("windows-1252"), "tentative" + + def changeEncoding(self, newEncoding): + assert self.charEncoding[1] != "certain" + 
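# Only a tentative guess can be revised here; an encoding already marked "certain" (from a BOM, an override or the transport layer) never reaches this point, as the assert above enforces.
+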
newEncoding = lookupEncoding(newEncoding) + if newEncoding is None: + return + if newEncoding.name in ("utf-16be", "utf-16le"): + newEncoding = lookupEncoding("utf-8") + assert newEncoding is not None + elif newEncoding == self.charEncoding[0]: + self.charEncoding = (self.charEncoding[0], "certain") + else: + self.rawStream.seek(0) + self.charEncoding = (newEncoding, "certain") + self.reset() + raise _ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding)) + + def detectBOM(self): + """Attempts to detect at BOM at the start of the stream. If + an encoding can be determined from the BOM return the name of the + encoding otherwise return None""" + bomDict = { + codecs.BOM_UTF8: 'utf-8', + codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be', + codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be' + } + + # Go to beginning of file and read in 4 bytes + string = self.rawStream.read(4) + assert isinstance(string, bytes) + + # Try detecting the BOM using bytes from the string + encoding = bomDict.get(string[:3]) # UTF-8 + seek = 3 + if not encoding: + # Need to detect UTF-32 before UTF-16 + encoding = bomDict.get(string) # UTF-32 + seek = 4 + if not encoding: + encoding = bomDict.get(string[:2]) # UTF-16 + seek = 2 + + # Set the read position past the BOM if one was found, otherwise + # set it to the start of the stream + if encoding: + self.rawStream.seek(seek) + return lookupEncoding(encoding) + else: + self.rawStream.seek(0) + return None + + def detectEncodingMeta(self): + """Report the encoding declared by the meta element + """ + buffer = self.rawStream.read(self.numBytesMeta) + assert isinstance(buffer, bytes) + parser = EncodingParser(buffer) + self.rawStream.seek(0) + encoding = parser.getEncoding() + + if encoding is not None and encoding.name in ("utf-16be", "utf-16le"): + encoding = lookupEncoding("utf-8") + + return encoding + + +class EncodingBytes(bytes): + """String-like object with an associated position and various extra methods + If the position is ever greater than the string length then an exception is + raised""" + def __new__(self, value): + assert isinstance(value, bytes) + return bytes.__new__(self, value.lower()) + + def __init__(self, value): + # pylint:disable=unused-argument + self._position = -1 + + def __iter__(self): + return self + + def __next__(self): + p = self._position = self._position + 1 + if p >= len(self): + raise StopIteration + elif p < 0: + raise TypeError + return self[p:p + 1] + + def next(self): + # Py2 compat + return self.__next__() + + def previous(self): + p = self._position + if p >= len(self): + raise StopIteration + elif p < 0: + raise TypeError + self._position = p = p - 1 + return self[p:p + 1] + + def setPosition(self, position): + if self._position >= len(self): + raise StopIteration + self._position = position + + def getPosition(self): + if self._position >= len(self): + raise StopIteration + if self._position >= 0: + return self._position + else: + return None + + position = property(getPosition, setPosition) + + def getCurrentByte(self): + return self[self.position:self.position + 1] + + currentByte = property(getCurrentByte) + + def skip(self, chars=spaceCharactersBytes): + """Skip past a list of characters""" + p = self.position # use property for the error-checking + while p < len(self): + c = self[p:p + 1] + if c not in chars: + self._position = p + return c + p += 1 + self._position = p + return None + + def skipUntil(self, chars): + p = self.position + while p < len(self): 
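+ # Walk forward one byte at a time; stop on the first byte that is in `chars` and leave the position pointing at it.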
+ c = self[p:p + 1] + if c in chars: + self._position = p + return c + p += 1 + self._position = p + return None + + def matchBytes(self, bytes): + """Look for a sequence of bytes at the start of a string. If the bytes + are found return True and advance the position to the byte after the + match. Otherwise return False and leave the position alone""" + rv = self.startswith(bytes, self.position) + if rv: + self.position += len(bytes) + return rv + + def jumpTo(self, bytes): + """Look for the next sequence of bytes matching a given sequence. If + a match is found advance the position to the last byte of the match""" + try: + self._position = self.index(bytes, self.position) + len(bytes) - 1 + except ValueError: + raise StopIteration + return True + + +class EncodingParser(object): + """Mini parser for detecting character encoding from meta elements""" + + def __init__(self, data): + """string - the data to work on for encoding detection""" + self.data = EncodingBytes(data) + self.encoding = None + + def getEncoding(self): + if b"") + + def handleMeta(self): + if self.data.currentByte not in spaceCharactersBytes: + # if we have ") + + def getAttribute(self): + """Return a name,value pair for the next attribute in the stream, + if one is found, or None""" + data = self.data + # Step 1 (skip chars) + c = data.skip(spaceCharactersBytes | frozenset([b"/"])) + assert c is None or len(c) == 1 + # Step 2 + if c in (b">", None): + return None + # Step 3 + attrName = [] + attrValue = [] + # Step 4 attribute name + while True: + if c == b"=" and attrName: + break + elif c in spaceCharactersBytes: + # Step 6! + c = data.skip() + break + elif c in (b"/", b">"): + return b"".join(attrName), b"" + elif c in asciiUppercaseBytes: + attrName.append(c.lower()) + elif c is None: + return None + else: + attrName.append(c) + # Step 5 + c = next(data) + # Step 7 + if c != b"=": + data.previous() + return b"".join(attrName), b"" + # Step 8 + next(data) + # Step 9 + c = data.skip() + # Step 10 + if c in (b"'", b'"'): + # 10.1 + quoteChar = c + while True: + # 10.2 + c = next(data) + # 10.3 + if c == quoteChar: + next(data) + return b"".join(attrName), b"".join(attrValue) + # 10.4 + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + # 10.5 + else: + attrValue.append(c) + elif c == b">": + return b"".join(attrName), b"" + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + elif c is None: + return None + else: + attrValue.append(c) + # Step 11 + while True: + c = next(data) + if c in spacesAngleBrackets: + return b"".join(attrName), b"".join(attrValue) + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + elif c is None: + return None + else: + attrValue.append(c) + + +class ContentAttrParser(object): + def __init__(self, data): + assert isinstance(data, bytes) + self.data = data + + def parse(self): + try: + # Check if the attr name is charset + # otherwise return + self.data.jumpTo(b"charset") + self.data.position += 1 + self.data.skip() + if not self.data.currentByte == b"=": + # If there is no = sign keep looking for attrs + return None + self.data.position += 1 + self.data.skip() + # Look for an encoding between matching quote marks + if self.data.currentByte in (b'"', b"'"): + quoteMark = self.data.currentByte + self.data.position += 1 + oldPosition = self.data.position + if self.data.jumpTo(quoteMark): + return self.data[oldPosition:self.data.position] + else: + return None + else: + # Unquoted value + oldPosition = self.data.position + try: + 
self.data.skipUntil(spaceCharactersBytes) + return self.data[oldPosition:self.data.position] + except StopIteration: + # Return the whole remaining value + return self.data[oldPosition:] + except StopIteration: + return None + + +def lookupEncoding(encoding): + """Return the python codec name corresponding to an encoding or None if the + string doesn't correspond to a valid encoding.""" + if isinstance(encoding, bytes): + try: + encoding = encoding.decode("ascii") + except UnicodeDecodeError: + return None + + if encoding is not None: + try: + return webencodings.lookup(encoding) + except AttributeError: + return None + else: + return None diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_tokenizer.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_tokenizer.py new file mode 100644 index 0000000..5f00253 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_tokenizer.py @@ -0,0 +1,1735 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import unichr as chr + +from collections import deque, OrderedDict +from sys import version_info + +from .constants import spaceCharacters +from .constants import entities +from .constants import asciiLetters, asciiUpper2Lower +from .constants import digits, hexDigits, EOF +from .constants import tokenTypes, tagTokenTypes +from .constants import replacementCharacters + +from ._inputstream import HTMLInputStream + +from ._trie import Trie + +entitiesTrie = Trie(entities) + +if version_info >= (3, 7): + attributeMap = dict +else: + attributeMap = OrderedDict + + +class HTMLTokenizer(object): + """ This class takes care of tokenizing HTML. + + * self.currentToken + Holds the token that is currently being processed. + + * self.state + Holds a reference to the method to be invoked... XXX + + * self.stream + Points to HTMLInputStream object. + """ + + def __init__(self, stream, parser=None, **kwargs): + + self.stream = HTMLInputStream(stream, **kwargs) + self.parser = parser + + # Setup the initial tokenizer state + self.escapeFlag = False + self.lastFourChars = [] + self.state = self.dataState + self.escape = False + + # The current token being created + self.currentToken = None + super(HTMLTokenizer, self).__init__() + + def __iter__(self): + """ This is where the magic happens. + + We do our usually processing through the states and when we have a token + to return we yield the token which pauses processing until the next token + is requested. + """ + self.tokenQueue = deque([]) + # Start processing. When EOF is reached self.state will return False + # instead of True and the loop will terminate. + while self.state(): + while self.stream.errors: + yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)} + while self.tokenQueue: + yield self.tokenQueue.popleft() + + def consumeNumberEntity(self, isHex): + """This function returns either U+FFFD or the character based on the + decimal or hexadecimal representation. It also discards ";" if present. + If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked. + """ + + allowed = digits + radix = 10 + if isHex: + allowed = hexDigits + radix = 16 + + charStack = [] + + # Consume all the characters that are in range while making sure we + # don't hit an EOF. + c = self.stream.char() + while c in allowed and c is not EOF: + charStack.append(c) + c = self.stream.char() + + # Convert the set of characters consumed to an int. 
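+ # For example, for the input "&#x41;" this point is reached with isHex=True and charStack == ["4", "1"], so charAsInt becomes 0x41 and the entity resolves to "A".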
+ charAsInt = int("".join(charStack), radix) + + # Certain characters get replaced with others + if charAsInt in replacementCharacters: + char = replacementCharacters[charAsInt] + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + elif ((0xD800 <= charAsInt <= 0xDFFF) or + (charAsInt > 0x10FFFF)): + char = "\uFFFD" + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + else: + # Should speed up this check somehow (e.g. move the set to a constant) + if ((0x0001 <= charAsInt <= 0x0008) or + (0x000E <= charAsInt <= 0x001F) or + (0x007F <= charAsInt <= 0x009F) or + (0xFDD0 <= charAsInt <= 0xFDEF) or + charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE, + 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, + 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, + 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, + 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE, + 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, + 0xFFFFF, 0x10FFFE, 0x10FFFF])): + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + try: + # Try/except needed as UCS-2 Python builds' unichar only works + # within the BMP. + char = chr(charAsInt) + except ValueError: + v = charAsInt - 0x10000 + char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF)) + + # Discard the ; if present. Otherwise, put it back on the queue and + # invoke parseError on parser. + if c != ";": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "numeric-entity-without-semicolon"}) + self.stream.unget(c) + + return char + + def consumeEntity(self, allowedChar=None, fromAttribute=False): + # Initialise to the default output for when no entity is matched + output = "&" + + charStack = [self.stream.char()] + if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or + (allowedChar is not None and allowedChar == charStack[0])): + self.stream.unget(charStack[0]) + + elif charStack[0] == "#": + # Read the next character to see if it's hex or decimal + hex = False + charStack.append(self.stream.char()) + if charStack[-1] in ("x", "X"): + hex = True + charStack.append(self.stream.char()) + + # charStack[-1] should be the first digit + if (hex and charStack[-1] in hexDigits) \ + or (not hex and charStack[-1] in digits): + # At least one digit found, so consume the whole number + self.stream.unget(charStack[-1]) + output = self.consumeNumberEntity(hex) + else: + # No digits found + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "expected-numeric-entity"}) + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + + else: + # At this point in the process might have named entity. Entities + # are stored in the global variable "entities". + # + # Consume characters and compare to these to a substring of the + # entity names in the list until the substring no longer matches. + while (charStack[-1] is not EOF): + if not entitiesTrie.has_keys_with_prefix("".join(charStack)): + break + charStack.append(self.stream.char()) + + # At this point we have a string that starts with some characters + # that may match an entity + # Try to find the longest entity the string will match to take care + # of ¬i for instance. 
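+ # For example, "&notin;" matches the full entity name "notin;", while "&notit" only matches the shorter prefix "not" and the leftover characters end up as ordinary character data.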
+ try: + entityName = entitiesTrie.longest_prefix("".join(charStack[:-1])) + entityLength = len(entityName) + except KeyError: + entityName = None + + if entityName is not None: + if entityName[-1] != ";": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "named-entity-without-semicolon"}) + if (entityName[-1] != ";" and fromAttribute and + (charStack[entityLength] in asciiLetters or + charStack[entityLength] in digits or + charStack[entityLength] == "=")): + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + else: + output = entities[entityName] + self.stream.unget(charStack.pop()) + output += "".join(charStack[entityLength:]) + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-named-entity"}) + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + + if fromAttribute: + self.currentToken["data"][-1][1] += output + else: + if output in spaceCharacters: + tokenType = "SpaceCharacters" + else: + tokenType = "Characters" + self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output}) + + def processEntityInAttribute(self, allowedChar): + """This method replaces the need for "entityInAttributeValueState". + """ + self.consumeEntity(allowedChar=allowedChar, fromAttribute=True) + + def emitCurrentToken(self): + """This method is a generic handler for emitting the tags. It also sets + the state to "data" because that's what's needed after a token has been + emitted. + """ + token = self.currentToken + # Add token to the queue to be yielded + if (token["type"] in tagTokenTypes): + token["name"] = token["name"].translate(asciiUpper2Lower) + if token["type"] == tokenTypes["StartTag"]: + raw = token["data"] + data = attributeMap(raw) + if len(raw) > len(data): + # we had some duplicated attribute, fix so first wins + data.update(raw[::-1]) + token["data"] = data + + if token["type"] == tokenTypes["EndTag"]: + if token["data"]: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "attributes-in-end-tag"}) + if token["selfClosing"]: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "self-closing-flag-on-end-tag"}) + self.tokenQueue.append(token) + self.state = self.dataState + + # Below are the various tokenizer states worked out. + def dataState(self): + data = self.stream.char() + if data == "&": + self.state = self.entityDataState + elif data == "<": + self.state = self.tagOpenState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\u0000"}) + elif data is EOF: + # Tokenization ends. + return False + elif data in spaceCharacters: + # Directly after emitting a token you switch back to the "data + # state". At that point spaceCharacters are important so they are + # emitted separately. 
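+ # charsUntil(spaceCharacters, True) inverts the match, so the whole run of consecutive whitespace is consumed here and emitted as one SpaceCharacters token.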
+ self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": + data + self.stream.charsUntil(spaceCharacters, True)}) + # No need to update lastFourChars here, since the first space will + # have already been appended to lastFourChars and will have broken + # any sequences + else: + chars = self.stream.charsUntil(("&", "<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def entityDataState(self): + self.consumeEntity() + self.state = self.dataState + return True + + def rcdataState(self): + data = self.stream.char() + if data == "&": + self.state = self.characterReferenceInRcdata + elif data == "<": + self.state = self.rcdataLessThanSignState + elif data == EOF: + # Tokenization ends. + return False + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data in spaceCharacters: + # Directly after emitting a token you switch back to the "data + # state". At that point spaceCharacters are important so they are + # emitted separately. + self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": + data + self.stream.charsUntil(spaceCharacters, True)}) + # No need to update lastFourChars here, since the first space will + # have already been appended to lastFourChars and will have broken + # any sequences + else: + chars = self.stream.charsUntil(("&", "<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def characterReferenceInRcdata(self): + self.consumeEntity() + self.state = self.rcdataState + return True + + def rawtextState(self): + data = self.stream.char() + if data == "<": + self.state = self.rawtextLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + # Tokenization ends. + return False + else: + chars = self.stream.charsUntil(("<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def scriptDataState(self): + data = self.stream.char() + if data == "<": + self.state = self.scriptDataLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + # Tokenization ends. + return False + else: + chars = self.stream.charsUntil(("<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def plaintextState(self): + data = self.stream.char() + if data == EOF: + # Tokenization ends. 
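+ # Returning False ends the `while self.state():` loop in HTMLTokenizer.__iter__, so iteration over the tokenizer stops here.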
+ return False + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + self.stream.charsUntil("\u0000")}) + return True + + def tagOpenState(self): + data = self.stream.char() + if data == "!": + self.state = self.markupDeclarationOpenState + elif data == "/": + self.state = self.closeTagOpenState + elif data in asciiLetters: + self.currentToken = {"type": tokenTypes["StartTag"], + "name": data, "data": [], + "selfClosing": False, + "selfClosingAcknowledged": False} + self.state = self.tagNameState + elif data == ">": + # XXX In theory it could be something besides a tag name. But + # do we really care? + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name-but-got-right-bracket"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"}) + self.state = self.dataState + elif data == "?": + # XXX In theory it could be something besides a tag name. But + # do we really care? + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name-but-got-question-mark"}) + self.stream.unget(data) + self.state = self.bogusCommentState + else: + # XXX + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.dataState + return True + + def closeTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.currentToken = {"type": tokenTypes["EndTag"], "name": data, + "data": [], "selfClosing": False} + self.state = self.tagNameState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-closing-tag-but-got-right-bracket"}) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-closing-tag-but-got-eof"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "": + self.emitCurrentToken() + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-tag-name"}) + self.state = self.dataState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] += "\uFFFD" + else: + self.currentToken["name"] += data + # (Don't use charsUntil here, because tag names are + # very short and it's faster to not do anything fancy) + return True + + def rcdataLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.temporaryBuffer = "" + self.state = self.rcdataEndTagOpenState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.rcdataState + return True + + def rcdataEndTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.temporaryBuffer += data + self.state = self.rcdataEndTagNameState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + 
else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) + self.state = self.scriptDataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataEscapedState + elif data == EOF: + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataEscapedState + return True + + def scriptDataEscapedLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.temporaryBuffer = "" + self.state = self.scriptDataEscapedEndTagOpenState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data}) + self.temporaryBuffer = data + self.state = self.scriptDataDoubleEscapeStartState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.scriptDataEscapedState + return True + + def scriptDataEscapedEndTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.temporaryBuffer = data + self.state = self.scriptDataEscapedEndTagNameState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": ""))): + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + if self.temporaryBuffer.lower() == "script": + self.state = self.scriptDataDoubleEscapedState + else: + self.state = self.scriptDataEscapedState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.temporaryBuffer += data + else: + self.stream.unget(data) + self.state = self.scriptDataEscapedState + return True + + def scriptDataDoubleEscapedState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataDoubleEscapedDashState + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + 
"eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + return True + + def scriptDataDoubleEscapedDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataDoubleEscapedDashDashState + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataDoubleEscapedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapedDashDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) + self.state = self.scriptDataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataDoubleEscapedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapedLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"}) + self.temporaryBuffer = "" + self.state = self.scriptDataDoubleEscapeEndState + else: + self.stream.unget(data) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapeEndState(self): + data = self.stream.char() + if data in (spaceCharacters | frozenset(("/", ">"))): + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + if self.temporaryBuffer.lower() == "script": + self.state = self.scriptDataEscapedState + else: + self.state = self.scriptDataDoubleEscapedState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.temporaryBuffer += data + else: + self.stream.unget(data) + self.state = self.scriptDataDoubleEscapedState + return True + + def beforeAttributeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data in asciiLetters: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == ">": + self.emitCurrentToken() + elif data == "/": + self.state = self.selfClosingStartTagState + elif data in ("'", '"', "=", "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "invalid-character-in-attribute-name"}) + 
self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"].append(["\uFFFD", ""]) + self.state = self.attributeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-name-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + return True + + def attributeNameState(self): + data = self.stream.char() + leavingThisState = True + emitToken = False + if data == "=": + self.state = self.beforeAttributeValueState + elif data in asciiLetters: + self.currentToken["data"][-1][0] += data +\ + self.stream.charsUntil(asciiLetters, True) + leavingThisState = False + elif data == ">": + # XXX If we emit here the attributes are converted to a dict + # without being checked and when the code below runs we error + # because data is a dict not a list + emitToken = True + elif data in spaceCharacters: + self.state = self.afterAttributeNameState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][0] += "\uFFFD" + leavingThisState = False + elif data in ("'", '"', "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "invalid-character-in-attribute-name"}) + self.currentToken["data"][-1][0] += data + leavingThisState = False + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "eof-in-attribute-name"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][0] += data + leavingThisState = False + + if leavingThisState: + # Attributes are not dropped at this stage. That happens when the + # start tag token is emitted so values can still be safely appended + # to attributes, but we do want to report the parse error in time. 
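+ # Lower-case the just-finished name and compare it with every earlier attribute; a repeated name is flagged as a "duplicate-attribute" parse error here (emitCurrentToken later keeps only the first occurrence).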
+ self.currentToken["data"][-1][0] = ( + self.currentToken["data"][-1][0].translate(asciiUpper2Lower)) + for name, _ in self.currentToken["data"][:-1]: + if self.currentToken["data"][-1][0] == name: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "duplicate-attribute"}) + break + # XXX Fix for above XXX + if emitToken: + self.emitCurrentToken() + return True + + def afterAttributeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data == "=": + self.state = self.beforeAttributeValueState + elif data == ">": + self.emitCurrentToken() + elif data in asciiLetters: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"].append(["\uFFFD", ""]) + self.state = self.attributeNameState + elif data in ("'", '"', "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "invalid-character-after-attribute-name"}) + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-end-of-tag-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + return True + + def beforeAttributeValueState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data == "\"": + self.state = self.attributeValueDoubleQuotedState + elif data == "&": + self.state = self.attributeValueUnQuotedState + self.stream.unget(data) + elif data == "'": + self.state = self.attributeValueSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-value-but-got-right-bracket"}) + self.emitCurrentToken() + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + self.state = self.attributeValueUnQuotedState + elif data in ("=", "<", "`"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "equals-in-unquoted-attribute-value"}) + self.currentToken["data"][-1][1] += data + self.state = self.attributeValueUnQuotedState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-value-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data + self.state = self.attributeValueUnQuotedState + return True + + def attributeValueDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterAttributeValueState + elif data == "&": + self.processEntityInAttribute('"') + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-double-quote"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data +\ + self.stream.charsUntil(("\"", "&", "\u0000")) + return True + + def attributeValueSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = 
self.afterAttributeValueState + elif data == "&": + self.processEntityInAttribute("'") + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-single-quote"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data +\ + self.stream.charsUntil(("'", "&", "\u0000")) + return True + + def attributeValueUnQuotedState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == "&": + self.processEntityInAttribute(">") + elif data == ">": + self.emitCurrentToken() + elif data in ('"', "'", "=", "<", "`"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-in-unquoted-attribute-value"}) + self.currentToken["data"][-1][1] += data + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-no-quotes"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data + self.stream.charsUntil( + frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters) + return True + + def afterAttributeValueState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == ">": + self.emitCurrentToken() + elif data == "/": + self.state = self.selfClosingStartTagState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-EOF-after-attribute-value"}) + self.stream.unget(data) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-after-attribute-value"}) + self.stream.unget(data) + self.state = self.beforeAttributeNameState + return True + + def selfClosingStartTagState(self): + data = self.stream.char() + if data == ">": + self.currentToken["selfClosing"] = True + self.emitCurrentToken() + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "unexpected-EOF-after-solidus-in-tag"}) + self.stream.unget(data) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-after-solidus-in-tag"}) + self.stream.unget(data) + self.state = self.beforeAttributeNameState + return True + + def bogusCommentState(self): + # Make a new comment token and give it as value all the characters + # until the first > or EOF (charsUntil checks for EOF automatically) + # and emit it. + data = self.stream.charsUntil(">") + data = data.replace("\u0000", "\uFFFD") + self.tokenQueue.append( + {"type": tokenTypes["Comment"], "data": data}) + + # Eat the character directly after the bogus comment which is either a + # ">" or an EOF. 
+ self.stream.char() + self.state = self.dataState + return True + + def markupDeclarationOpenState(self): + charStack = [self.stream.char()] + if charStack[-1] == "-": + charStack.append(self.stream.char()) + if charStack[-1] == "-": + self.currentToken = {"type": tokenTypes["Comment"], "data": ""} + self.state = self.commentStartState + return True + elif charStack[-1] in ('d', 'D'): + matched = True + for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'), + ('y', 'Y'), ('p', 'P'), ('e', 'E')): + charStack.append(self.stream.char()) + if charStack[-1] not in expected: + matched = False + break + if matched: + self.currentToken = {"type": tokenTypes["Doctype"], + "name": "", + "publicId": None, "systemId": None, + "correct": True} + self.state = self.doctypeState + return True + elif (charStack[-1] == "[" and + self.parser is not None and + self.parser.tree.openElements and + self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace): + matched = True + for expected in ["C", "D", "A", "T", "A", "["]: + charStack.append(self.stream.char()) + if charStack[-1] != expected: + matched = False + break + if matched: + self.state = self.cdataSectionState + return True + + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-dashes-or-doctype"}) + + while charStack: + self.stream.unget(charStack.pop()) + self.state = self.bogusCommentState + return True + + def commentStartState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentStartDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "incorrect-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += data + self.state = self.commentState + return True + + def commentStartDashState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "-\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "incorrect-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "-" + data + self.state = self.commentState + return True + + def commentState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += data + \ + self.stream.charsUntil(("-", "\u0000")) + return True + + def commentEndDashState(self): + data = self.stream.char() + if data == "-": + self.state = 
self.commentEndState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "-\uFFFD" + self.state = self.commentState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-end-dash"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "-" + data + self.state = self.commentState + return True + + def commentEndState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "--\uFFFD" + self.state = self.commentState + elif data == "!": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-bang-after-double-dash-in-comment"}) + self.state = self.commentEndBangState + elif data == "-": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-dash-after-double-dash-in-comment"}) + self.currentToken["data"] += data + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-double-dash"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + # XXX + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-comment"}) + self.currentToken["data"] += "--" + data + self.state = self.commentState + return True + + def commentEndBangState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "-": + self.currentToken["data"] += "--!" + self.state = self.commentEndDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "--!\uFFFD" + self.state = self.commentState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-end-bang-state"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "--!" 
+ data + self.state = self.commentState + return True + + def doctypeState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-eof"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "need-space-after-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypeNameState + return True + + def beforeDoctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-right-bracket"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] = "\uFFFD" + self.state = self.doctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-eof"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["name"] = data + self.state = self.doctypeNameState + return True + + def doctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.state = self.afterDoctypeNameState + elif data == ">": + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] += "\uFFFD" + self.state = self.doctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype-name"}) + self.currentToken["correct"] = False + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["name"] += data + return True + + def afterDoctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.currentToken["correct"] = False + self.stream.unget(data) + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + if data in ("p", "P"): + matched = True + for expected in (("u", "U"), ("b", "B"), ("l", "L"), + ("i", "I"), ("c", "C")): + data = self.stream.char() + if data not in expected: + matched = False + break + if matched: + self.state = self.afterDoctypePublicKeywordState + return True + elif data in ("s", "S"): + matched = True + for expected in (("y", "Y"), ("s", "S"), ("t", "T"), + ("e", "E"), ("m", "M")): + data = self.stream.char() + if data not in expected: + matched = False + break + if matched: + self.state = self.afterDoctypeSystemKeywordState + return True + + # All the characters read before the current 'data' will be + # [a-zA-Z], 
so they're garbage in the bogus doctype and can be + # discarded; only the latest character might be '>' or EOF + # and needs to be ungetted + self.stream.unget(data) + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-space-or-right-bracket-in-doctype", "datavars": + {"data": data}}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + + return True + + def afterDoctypePublicKeywordState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypePublicIdentifierState + elif data in ("'", '"'): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypePublicIdentifierState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.stream.unget(data) + self.state = self.beforeDoctypePublicIdentifierState + return True + + def beforeDoctypePublicIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == "\"": + self.currentToken["publicId"] = "" + self.state = self.doctypePublicIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["publicId"] = "" + self.state = self.doctypePublicIdentifierSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def doctypePublicIdentifierDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterDoctypePublicIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["publicId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["publicId"] += data + return True + + def doctypePublicIdentifierSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterDoctypePublicIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["publicId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": 
tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["publicId"] += data + return True + + def afterDoctypePublicIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.betweenDoctypePublicAndSystemIdentifiersState + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == '"': + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def betweenDoctypePublicAndSystemIdentifiersState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == '"': + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def afterDoctypeSystemKeywordState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypeSystemIdentifierState + elif data in ("'", '"'): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypeSystemIdentifierState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.stream.unget(data) + self.state = self.beforeDoctypeSystemIdentifierState + return True + + def beforeDoctypeSystemIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == "\"": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def doctypeSystemIdentifierDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterDoctypeSystemIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["systemId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["systemId"] += data + return True + + def doctypeSystemIdentifierSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterDoctypeSystemIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["systemId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["systemId"] += data + return True + + def afterDoctypeSystemIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.state = self.bogusDoctypeState + return True + + def bogusDoctypeState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + # XXX EMIT + self.stream.unget(data) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + pass + return True + + def cdataSectionState(self): + data = [] + while True: + data.append(self.stream.charsUntil("]")) + data.append(self.stream.charsUntil(">")) + char = self.stream.char() + if char == EOF: + break + else: + assert char == ">" + if data[-1][-2:] == "]]": + data[-1] = data[-1][:-2] + break + else: + data.append(char) + + data = "".join(data) # pylint:disable=redefined-variable-type + # Deal with null here rather than in the parser + nullCount = data.count("\u0000") + if nullCount > 0: + for _ in range(nullCount): + self.tokenQueue.append({"type": tokenTypes["ParseError"], 
+ "data": "invalid-codepoint"}) + data = data.replace("\u0000", "\uFFFD") + if data: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": data}) + self.state = self.dataState + return True diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__init__.py new file mode 100644 index 0000000..07bad5d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import, division, unicode_literals + +from .py import Trie + +__all__ = ["Trie"] diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..a4e046e Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc new file mode 100644 index 0000000..84836af Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc new file mode 100644 index 0000000..c3ad606 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/_base.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/_base.py new file mode 100644 index 0000000..6b71975 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/_base.py @@ -0,0 +1,40 @@ +from __future__ import absolute_import, division, unicode_literals + +try: + from collections.abc import Mapping +except ImportError: # Python 2.7 + from collections import Mapping + + +class Trie(Mapping): + """Abstract base class for tries""" + + def keys(self, prefix=None): + # pylint:disable=arguments-differ + keys = super(Trie, self).keys() + + if prefix is None: + return set(keys) + + return {x for x in keys if x.startswith(prefix)} + + def has_keys_with_prefix(self, prefix): + for key in self.keys(): + if key.startswith(prefix): + return True + + return False + + def longest_prefix(self, prefix): + if prefix in self: + return prefix + + for i in range(1, len(prefix) + 1): + if prefix[:-i] in self: + return prefix[:-i] + + raise KeyError(prefix) + + def longest_prefix_item(self, prefix): + lprefix = self.longest_prefix(prefix) + return (lprefix, self[lprefix]) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/py.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/py.py new file mode 100644 index 0000000..c178b21 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_trie/py.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import text_type + +from bisect import bisect_left + +from ._base import Trie as ABCTrie + + +class Trie(ABCTrie): + def __init__(self, data): + if not all(isinstance(x, text_type) for x in data.keys()): + raise 
TypeError("All keys must be strings") + + self._data = data + self._keys = sorted(data.keys()) + self._cachestr = "" + self._cachepoints = (0, len(data)) + + def __contains__(self, key): + return key in self._data + + def __len__(self): + return len(self._data) + + def __iter__(self): + return iter(self._data) + + def __getitem__(self, key): + return self._data[key] + + def keys(self, prefix=None): + if prefix is None or prefix == "" or not self._keys: + return set(self._keys) + + if prefix.startswith(self._cachestr): + lo, hi = self._cachepoints + start = i = bisect_left(self._keys, prefix, lo, hi) + else: + start = i = bisect_left(self._keys, prefix) + + keys = set() + if start == len(self._keys): + return keys + + while self._keys[i].startswith(prefix): + keys.add(self._keys[i]) + i += 1 + + self._cachestr = prefix + self._cachepoints = (start, i) + + return keys + + def has_keys_with_prefix(self, prefix): + if prefix in self._data: + return True + + if prefix.startswith(self._cachestr): + lo, hi = self._cachepoints + i = bisect_left(self._keys, prefix, lo, hi) + else: + i = bisect_left(self._keys, prefix) + + if i == len(self._keys): + return False + + return self._keys[i].startswith(prefix) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_utils.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_utils.py new file mode 100644 index 0000000..d7c4926 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/_utils.py @@ -0,0 +1,159 @@ +from __future__ import absolute_import, division, unicode_literals + +from types import ModuleType + +try: + from collections.abc import Mapping +except ImportError: + from collections import Mapping + +from pip._vendor.six import text_type, PY3 + +if PY3: + import xml.etree.ElementTree as default_etree +else: + try: + import xml.etree.cElementTree as default_etree + except ImportError: + import xml.etree.ElementTree as default_etree + + +__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", + "surrogatePairToCodepoint", "moduleFactoryFactory", + "supports_lone_surrogates"] + + +# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be +# caught by the below test. In general this would be any platform +# using UTF-16 as its encoding of unicode strings, such as +# Jython. This is because UTF-16 itself is based on the use of such +# surrogates, and there is no mechanism to further escape such +# escapes. +try: + _x = eval('"\\uD800"') # pylint:disable=eval-used + if not isinstance(_x, text_type): + # We need this with u"" because of http://bugs.jython.org/issue2039 + _x = eval('u"\\uD800"') # pylint:disable=eval-used + assert isinstance(_x, text_type) +except Exception: + supports_lone_surrogates = False +else: + supports_lone_surrogates = True + + +class MethodDispatcher(dict): + """Dict with 2 special properties: + + On initiation, keys that are lists, sets or tuples are converted to + multiple keys so accessing any one of the items in the original + list-like object returns the matching value + + md = MethodDispatcher({("foo", "bar"):"baz"}) + md["foo"] == "baz" + + A default value which can be set through the default attribute. 
+ """ + + def __init__(self, items=()): + _dictEntries = [] + for name, value in items: + if isinstance(name, (list, tuple, frozenset, set)): + for item in name: + _dictEntries.append((item, value)) + else: + _dictEntries.append((name, value)) + dict.__init__(self, _dictEntries) + assert len(self) == len(_dictEntries) + self.default = None + + def __getitem__(self, key): + return dict.get(self, key, self.default) + + def __get__(self, instance, owner=None): + return BoundMethodDispatcher(instance, self) + + +class BoundMethodDispatcher(Mapping): + """Wraps a MethodDispatcher, binding its return values to `instance`""" + def __init__(self, instance, dispatcher): + self.instance = instance + self.dispatcher = dispatcher + + def __getitem__(self, key): + # see https://docs.python.org/3/reference/datamodel.html#object.__get__ + # on a function, __get__ is used to bind a function to an instance as a bound method + return self.dispatcher[key].__get__(self.instance) + + def get(self, key, default): + if key in self.dispatcher: + return self[key] + else: + return default + + def __iter__(self): + return iter(self.dispatcher) + + def __len__(self): + return len(self.dispatcher) + + def __contains__(self, key): + return key in self.dispatcher + + +# Some utility functions to deal with weirdness around UCS2 vs UCS4 +# python builds + +def isSurrogatePair(data): + return (len(data) == 2 and + ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and + ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF) + + +def surrogatePairToCodepoint(data): + char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 + + (ord(data[1]) - 0xDC00)) + return char_val + +# Module Factory Factory (no, this isn't Java, I know) +# Here to stop this being duplicated all over the place. + + +def moduleFactoryFactory(factory): + moduleCache = {} + + def moduleFactory(baseModule, *args, **kwargs): + if isinstance(ModuleType.__name__, type("")): + name = "_%s_factory" % baseModule.__name__ + else: + name = b"_%s_factory" % baseModule.__name__ + + kwargs_tuple = tuple(kwargs.items()) + + try: + return moduleCache[name][args][kwargs_tuple] + except KeyError: + mod = ModuleType(name) + objs = factory(baseModule, *args, **kwargs) + mod.__dict__.update(objs) + if "name" not in moduleCache: + moduleCache[name] = {} + if "args" not in moduleCache[name]: + moduleCache[name][args] = {} + if "kwargs" not in moduleCache[name][args]: + moduleCache[name][args][kwargs_tuple] = {} + moduleCache[name][args][kwargs_tuple] = mod + return mod + + return moduleFactory + + +def memoize(func): + cache = {} + + def wrapped(*args, **kwargs): + key = (tuple(args), tuple(kwargs.items())) + if key not in cache: + cache[key] = func(*args, **kwargs) + return cache[key] + + return wrapped diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/constants.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/constants.py new file mode 100644 index 0000000..fe3e237 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/constants.py @@ -0,0 +1,2946 @@ +from __future__ import absolute_import, division, unicode_literals + +import string + +EOF = None + +E = { + "null-character": + "Null character in input stream, replaced with U+FFFD.", + "invalid-codepoint": + "Invalid codepoint in stream.", + "incorrectly-placed-solidus": + "Solidus (/) incorrectly placed in tag.", + "incorrect-cr-newline-entity": + "Incorrect CR newline entity, replaced with LF.", + "illegal-windows-1252-entity": + "Entity used with illegal number (windows-1252 
reference).", + "cant-convert-numeric-entity": + "Numeric entity couldn't be converted to character " + "(codepoint U+%(charAsInt)08x).", + "illegal-codepoint-for-numeric-entity": + "Numeric entity represents an illegal codepoint: " + "U+%(charAsInt)08x.", + "numeric-entity-without-semicolon": + "Numeric entity didn't end with ';'.", + "expected-numeric-entity-but-got-eof": + "Numeric entity expected. Got end of file instead.", + "expected-numeric-entity": + "Numeric entity expected but none found.", + "named-entity-without-semicolon": + "Named entity didn't end with ';'.", + "expected-named-entity": + "Named entity expected. Got none.", + "attributes-in-end-tag": + "End tag contains unexpected attributes.", + 'self-closing-flag-on-end-tag': + "End tag contains unexpected self-closing flag.", + "expected-tag-name-but-got-right-bracket": + "Expected tag name. Got '>' instead.", + "expected-tag-name-but-got-question-mark": + "Expected tag name. Got '?' instead. (HTML doesn't " + "support processing instructions.)", + "expected-tag-name": + "Expected tag name. Got something else instead", + "expected-closing-tag-but-got-right-bracket": + "Expected closing tag. Got '>' instead. Ignoring ''.", + "expected-closing-tag-but-got-eof": + "Expected closing tag. Unexpected end of file.", + "expected-closing-tag-but-got-char": + "Expected closing tag. Unexpected character '%(data)s' found.", + "eof-in-tag-name": + "Unexpected end of file in the tag name.", + "expected-attribute-name-but-got-eof": + "Unexpected end of file. Expected attribute name instead.", + "eof-in-attribute-name": + "Unexpected end of file in attribute name.", + "invalid-character-in-attribute-name": + "Invalid character in attribute name", + "duplicate-attribute": + "Dropped duplicate attribute on tag.", + "expected-end-of-tag-name-but-got-eof": + "Unexpected end of file. Expected = or end of tag.", + "expected-attribute-value-but-got-eof": + "Unexpected end of file. Expected attribute value.", + "expected-attribute-value-but-got-right-bracket": + "Expected attribute value. Got '>' instead.", + 'equals-in-unquoted-attribute-value': + "Unexpected = in unquoted attribute", + 'unexpected-character-in-unquoted-attribute-value': + "Unexpected character in unquoted attribute", + "invalid-character-after-attribute-name": + "Unexpected character after attribute name.", + "unexpected-character-after-attribute-value": + "Unexpected character after attribute value.", + "eof-in-attribute-value-double-quote": + "Unexpected end of file in attribute value (\").", + "eof-in-attribute-value-single-quote": + "Unexpected end of file in attribute value (').", + "eof-in-attribute-value-no-quotes": + "Unexpected end of file in attribute value.", + "unexpected-EOF-after-solidus-in-tag": + "Unexpected end of file in tag. Expected >", + "unexpected-character-after-solidus-in-tag": + "Unexpected character after / in tag. Expected >", + "expected-dashes-or-doctype": + "Expected '--' or 'DOCTYPE'. Not found.", + "unexpected-bang-after-double-dash-in-comment": + "Unexpected ! 
after -- in comment", + "unexpected-space-after-double-dash-in-comment": + "Unexpected space after -- in comment", + "incorrect-comment": + "Incorrect comment.", + "eof-in-comment": + "Unexpected end of file in comment.", + "eof-in-comment-end-dash": + "Unexpected end of file in comment (-)", + "unexpected-dash-after-double-dash-in-comment": + "Unexpected '-' after '--' found in comment.", + "eof-in-comment-double-dash": + "Unexpected end of file in comment (--).", + "eof-in-comment-end-space-state": + "Unexpected end of file in comment.", + "eof-in-comment-end-bang-state": + "Unexpected end of file in comment.", + "unexpected-char-in-comment": + "Unexpected character in comment found.", + "need-space-after-doctype": + "No space after literal string 'DOCTYPE'.", + "expected-doctype-name-but-got-right-bracket": + "Unexpected > character. Expected DOCTYPE name.", + "expected-doctype-name-but-got-eof": + "Unexpected end of file. Expected DOCTYPE name.", + "eof-in-doctype-name": + "Unexpected end of file in DOCTYPE name.", + "eof-in-doctype": + "Unexpected end of file in DOCTYPE.", + "expected-space-or-right-bracket-in-doctype": + "Expected space or '>'. Got '%(data)s'", + "unexpected-end-of-doctype": + "Unexpected end of DOCTYPE.", + "unexpected-char-in-doctype": + "Unexpected character in DOCTYPE.", + "eof-in-innerhtml": + "XXX innerHTML EOF", + "unexpected-doctype": + "Unexpected DOCTYPE. Ignored.", + "non-html-root": + "html needs to be the first start tag.", + "expected-doctype-but-got-eof": + "Unexpected End of file. Expected DOCTYPE.", + "unknown-doctype": + "Erroneous DOCTYPE.", + "expected-doctype-but-got-chars": + "Unexpected non-space characters. Expected DOCTYPE.", + "expected-doctype-but-got-start-tag": + "Unexpected start tag (%(name)s). Expected DOCTYPE.", + "expected-doctype-but-got-end-tag": + "Unexpected end tag (%(name)s). Expected DOCTYPE.", + "end-tag-after-implied-root": + "Unexpected end tag (%(name)s) after the (implied) root element.", + "expected-named-closing-tag-but-got-eof": + "Unexpected end of file. Expected end tag (%(name)s).", + "two-heads-are-not-better-than-one": + "Unexpected start tag head in existing head. Ignored.", + "unexpected-end-tag": + "Unexpected end tag (%(name)s). Ignored.", + "unexpected-start-tag-out-of-my-head": + "Unexpected start tag (%(name)s) that can be in head. Moved.", + "unexpected-start-tag": + "Unexpected start tag (%(name)s).", + "missing-end-tag": + "Missing end tag (%(name)s).", + "missing-end-tags": + "Missing end tags (%(name)s).", + "unexpected-start-tag-implies-end-tag": + "Unexpected start tag (%(startName)s) " + "implies end tag (%(endName)s).", + "unexpected-start-tag-treated-as": + "Unexpected start tag (%(originalName)s). Treated as %(newName)s.", + "deprecated-tag": + "Unexpected start tag %(name)s. Don't use it!", + "unexpected-start-tag-ignored": + "Unexpected start tag %(name)s. Ignored.", + "expected-one-end-tag-but-got-another": + "Unexpected end tag (%(gotName)s). " + "Missing end tag (%(expectedName)s).", + "end-tag-too-early": + "End tag (%(name)s) seen too early. Expected other end tag.", + "end-tag-too-early-named": + "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).", + "end-tag-too-early-ignored": + "End tag (%(name)s) seen too early. 
Ignored.", + "adoption-agency-1.1": + "End tag (%(name)s) violates step 1, " + "paragraph 1 of the adoption agency algorithm.", + "adoption-agency-1.2": + "End tag (%(name)s) violates step 1, " + "paragraph 2 of the adoption agency algorithm.", + "adoption-agency-1.3": + "End tag (%(name)s) violates step 1, " + "paragraph 3 of the adoption agency algorithm.", + "adoption-agency-4.4": + "End tag (%(name)s) violates step 4, " + "paragraph 4 of the adoption agency algorithm.", + "unexpected-end-tag-treated-as": + "Unexpected end tag (%(originalName)s). Treated as %(newName)s.", + "no-end-tag": + "This element (%(name)s) has no end tag.", + "unexpected-implied-end-tag-in-table": + "Unexpected implied end tag (%(name)s) in the table phase.", + "unexpected-implied-end-tag-in-table-body": + "Unexpected implied end tag (%(name)s) in the table body phase.", + "unexpected-char-implies-table-voodoo": + "Unexpected non-space characters in " + "table context caused voodoo mode.", + "unexpected-hidden-input-in-table": + "Unexpected input with type hidden in table context.", + "unexpected-form-in-table": + "Unexpected form in table context.", + "unexpected-start-tag-implies-table-voodoo": + "Unexpected start tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-end-tag-implies-table-voodoo": + "Unexpected end tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-cell-in-table-body": + "Unexpected table cell start tag (%(name)s) " + "in the table body phase.", + "unexpected-cell-end-tag": + "Got table cell end tag (%(name)s) " + "while required end tags are missing.", + "unexpected-end-tag-in-table-body": + "Unexpected end tag (%(name)s) in the table body phase. Ignored.", + "unexpected-implied-end-tag-in-table-row": + "Unexpected implied end tag (%(name)s) in the table row phase.", + "unexpected-end-tag-in-table-row": + "Unexpected end tag (%(name)s) in the table row phase. Ignored.", + "unexpected-select-in-select": + "Unexpected select start tag in the select phase " + "treated as select end tag.", + "unexpected-input-in-select": + "Unexpected input start tag in the select phase.", + "unexpected-start-tag-in-select": + "Unexpected start tag token (%(name)s in the select phase. " + "Ignored.", + "unexpected-end-tag-in-select": + "Unexpected end tag (%(name)s) in the select phase. Ignored.", + "unexpected-table-element-start-tag-in-select-in-table": + "Unexpected table element start tag (%(name)s) in the select in table phase.", + "unexpected-table-element-end-tag-in-select-in-table": + "Unexpected table element end tag (%(name)s) in the select in table phase.", + "unexpected-char-after-body": + "Unexpected non-space characters in the after body phase.", + "unexpected-start-tag-after-body": + "Unexpected start tag token (%(name)s)" + " in the after body phase.", + "unexpected-end-tag-after-body": + "Unexpected end tag token (%(name)s)" + " in the after body phase.", + "unexpected-char-in-frameset": + "Unexpected characters in the frameset phase. Characters ignored.", + "unexpected-start-tag-in-frameset": + "Unexpected start tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-frameset-in-frameset-innerhtml": + "Unexpected end tag token (frameset) " + "in the frameset phase (innerHTML).", + "unexpected-end-tag-in-frameset": + "Unexpected end tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-char-after-frameset": + "Unexpected non-space characters in the " + "after frameset phase. 
Ignored.", + "unexpected-start-tag-after-frameset": + "Unexpected start tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-frameset": + "Unexpected end tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-body-innerhtml": + "Unexpected end tag after body(innerHtml)", + "expected-eof-but-got-char": + "Unexpected non-space characters. Expected end of file.", + "expected-eof-but-got-start-tag": + "Unexpected start tag (%(name)s)" + ". Expected end of file.", + "expected-eof-but-got-end-tag": + "Unexpected end tag (%(name)s)" + ". Expected end of file.", + "eof-in-table": + "Unexpected end of file. Expected table content.", + "eof-in-select": + "Unexpected end of file. Expected select content.", + "eof-in-frameset": + "Unexpected end of file. Expected frameset content.", + "eof-in-script-in-script": + "Unexpected end of file. Expected script content.", + "eof-in-foreign-lands": + "Unexpected end of file. Expected foreign content", + "non-void-element-with-trailing-solidus": + "Trailing solidus not allowed on element %(name)s", + "unexpected-html-element-in-foreign-content": + "Element %(name)s not allowed in a non-html context", + "unexpected-end-tag-before-html": + "Unexpected end tag (%(name)s) before html.", + "unexpected-inhead-noscript-tag": + "Element %(name)s not allowed in a inhead-noscript context", + "eof-in-head-noscript": + "Unexpected end of file. Expected inhead-noscript content", + "char-in-head-noscript": + "Unexpected non-space character. Expected inhead-noscript content", + "XXX-undefined-error": + "Undefined error (this sucks and should be fixed)", +} + +namespaces = { + "html": "http://www.w3.org/1999/xhtml", + "mathml": "http://www.w3.org/1998/Math/MathML", + "svg": "http://www.w3.org/2000/svg", + "xlink": "http://www.w3.org/1999/xlink", + "xml": "http://www.w3.org/XML/1998/namespace", + "xmlns": "http://www.w3.org/2000/xmlns/" +} + +scopingElements = frozenset([ + (namespaces["html"], "applet"), + (namespaces["html"], "caption"), + (namespaces["html"], "html"), + (namespaces["html"], "marquee"), + (namespaces["html"], "object"), + (namespaces["html"], "table"), + (namespaces["html"], "td"), + (namespaces["html"], "th"), + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext"), + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title"), +]) + +formattingElements = frozenset([ + (namespaces["html"], "a"), + (namespaces["html"], "b"), + (namespaces["html"], "big"), + (namespaces["html"], "code"), + (namespaces["html"], "em"), + (namespaces["html"], "font"), + (namespaces["html"], "i"), + (namespaces["html"], "nobr"), + (namespaces["html"], "s"), + (namespaces["html"], "small"), + (namespaces["html"], "strike"), + (namespaces["html"], "strong"), + (namespaces["html"], "tt"), + (namespaces["html"], "u") +]) + +specialElements = frozenset([ + (namespaces["html"], "address"), + (namespaces["html"], "applet"), + (namespaces["html"], "area"), + (namespaces["html"], "article"), + (namespaces["html"], "aside"), + (namespaces["html"], "base"), + (namespaces["html"], "basefont"), + (namespaces["html"], "bgsound"), + (namespaces["html"], "blockquote"), + (namespaces["html"], "body"), + (namespaces["html"], "br"), + (namespaces["html"], "button"), + (namespaces["html"], "caption"), + (namespaces["html"], "center"), + 
(namespaces["html"], "col"), + (namespaces["html"], "colgroup"), + (namespaces["html"], "command"), + (namespaces["html"], "dd"), + (namespaces["html"], "details"), + (namespaces["html"], "dir"), + (namespaces["html"], "div"), + (namespaces["html"], "dl"), + (namespaces["html"], "dt"), + (namespaces["html"], "embed"), + (namespaces["html"], "fieldset"), + (namespaces["html"], "figure"), + (namespaces["html"], "footer"), + (namespaces["html"], "form"), + (namespaces["html"], "frame"), + (namespaces["html"], "frameset"), + (namespaces["html"], "h1"), + (namespaces["html"], "h2"), + (namespaces["html"], "h3"), + (namespaces["html"], "h4"), + (namespaces["html"], "h5"), + (namespaces["html"], "h6"), + (namespaces["html"], "head"), + (namespaces["html"], "header"), + (namespaces["html"], "hr"), + (namespaces["html"], "html"), + (namespaces["html"], "iframe"), + # Note that image is commented out in the spec as "this isn't an + # element that can end up on the stack, so it doesn't matter," + (namespaces["html"], "image"), + (namespaces["html"], "img"), + (namespaces["html"], "input"), + (namespaces["html"], "isindex"), + (namespaces["html"], "li"), + (namespaces["html"], "link"), + (namespaces["html"], "listing"), + (namespaces["html"], "marquee"), + (namespaces["html"], "menu"), + (namespaces["html"], "meta"), + (namespaces["html"], "nav"), + (namespaces["html"], "noembed"), + (namespaces["html"], "noframes"), + (namespaces["html"], "noscript"), + (namespaces["html"], "object"), + (namespaces["html"], "ol"), + (namespaces["html"], "p"), + (namespaces["html"], "param"), + (namespaces["html"], "plaintext"), + (namespaces["html"], "pre"), + (namespaces["html"], "script"), + (namespaces["html"], "section"), + (namespaces["html"], "select"), + (namespaces["html"], "style"), + (namespaces["html"], "table"), + (namespaces["html"], "tbody"), + (namespaces["html"], "td"), + (namespaces["html"], "textarea"), + (namespaces["html"], "tfoot"), + (namespaces["html"], "th"), + (namespaces["html"], "thead"), + (namespaces["html"], "title"), + (namespaces["html"], "tr"), + (namespaces["html"], "ul"), + (namespaces["html"], "wbr"), + (namespaces["html"], "xmp"), + (namespaces["svg"], "foreignObject") +]) + +htmlIntegrationPointElements = frozenset([ + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title") +]) + +mathmlTextIntegrationPointElements = frozenset([ + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext") +]) + +adjustSVGAttributes = { + "attributename": "attributeName", + "attributetype": "attributeType", + "basefrequency": "baseFrequency", + "baseprofile": "baseProfile", + "calcmode": "calcMode", + "clippathunits": "clipPathUnits", + "contentscripttype": "contentScriptType", + "contentstyletype": "contentStyleType", + "diffuseconstant": "diffuseConstant", + "edgemode": "edgeMode", + "externalresourcesrequired": "externalResourcesRequired", + "filterres": "filterRes", + "filterunits": "filterUnits", + "glyphref": "glyphRef", + "gradienttransform": "gradientTransform", + "gradientunits": "gradientUnits", + "kernelmatrix": "kernelMatrix", + "kernelunitlength": "kernelUnitLength", + "keypoints": "keyPoints", + "keysplines": "keySplines", + "keytimes": "keyTimes", + "lengthadjust": "lengthAdjust", + "limitingconeangle": "limitingConeAngle", + "markerheight": "markerHeight", + "markerunits": "markerUnits", + 
"markerwidth": "markerWidth", + "maskcontentunits": "maskContentUnits", + "maskunits": "maskUnits", + "numoctaves": "numOctaves", + "pathlength": "pathLength", + "patterncontentunits": "patternContentUnits", + "patterntransform": "patternTransform", + "patternunits": "patternUnits", + "pointsatx": "pointsAtX", + "pointsaty": "pointsAtY", + "pointsatz": "pointsAtZ", + "preservealpha": "preserveAlpha", + "preserveaspectratio": "preserveAspectRatio", + "primitiveunits": "primitiveUnits", + "refx": "refX", + "refy": "refY", + "repeatcount": "repeatCount", + "repeatdur": "repeatDur", + "requiredextensions": "requiredExtensions", + "requiredfeatures": "requiredFeatures", + "specularconstant": "specularConstant", + "specularexponent": "specularExponent", + "spreadmethod": "spreadMethod", + "startoffset": "startOffset", + "stddeviation": "stdDeviation", + "stitchtiles": "stitchTiles", + "surfacescale": "surfaceScale", + "systemlanguage": "systemLanguage", + "tablevalues": "tableValues", + "targetx": "targetX", + "targety": "targetY", + "textlength": "textLength", + "viewbox": "viewBox", + "viewtarget": "viewTarget", + "xchannelselector": "xChannelSelector", + "ychannelselector": "yChannelSelector", + "zoomandpan": "zoomAndPan" +} + +adjustMathMLAttributes = {"definitionurl": "definitionURL"} + +adjustForeignAttributes = { + "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]), + "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]), + "xlink:href": ("xlink", "href", namespaces["xlink"]), + "xlink:role": ("xlink", "role", namespaces["xlink"]), + "xlink:show": ("xlink", "show", namespaces["xlink"]), + "xlink:title": ("xlink", "title", namespaces["xlink"]), + "xlink:type": ("xlink", "type", namespaces["xlink"]), + "xml:base": ("xml", "base", namespaces["xml"]), + "xml:lang": ("xml", "lang", namespaces["xml"]), + "xml:space": ("xml", "space", namespaces["xml"]), + "xmlns": (None, "xmlns", namespaces["xmlns"]), + "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) +} + +unadjustForeignAttributes = {(ns, local): qname for qname, (prefix, local, ns) in + adjustForeignAttributes.items()} + +spaceCharacters = frozenset([ + "\t", + "\n", + "\u000C", + " ", + "\r" +]) + +tableInsertModeElements = frozenset([ + "table", + "tbody", + "tfoot", + "thead", + "tr" +]) + +asciiLowercase = frozenset(string.ascii_lowercase) +asciiUppercase = frozenset(string.ascii_uppercase) +asciiLetters = frozenset(string.ascii_letters) +digits = frozenset(string.digits) +hexDigits = frozenset(string.hexdigits) + +asciiUpper2Lower = {ord(c): ord(c.lower()) for c in string.ascii_uppercase} + +# Heading elements need to be ordered +headingElements = ( + "h1", + "h2", + "h3", + "h4", + "h5", + "h6" +) + +voidElements = frozenset([ + "base", + "command", + "event-source", + "link", + "meta", + "hr", + "br", + "img", + "embed", + "param", + "area", + "col", + "input", + "source", + "track" +]) + +cdataElements = frozenset(['title', 'textarea']) + +rcdataElements = frozenset([ + 'style', + 'script', + 'xmp', + 'iframe', + 'noembed', + 'noframes', + 'noscript' +]) + +booleanAttributes = { + "": frozenset(["irrelevant", "itemscope"]), + "style": frozenset(["scoped"]), + "img": frozenset(["ismap"]), + "audio": frozenset(["autoplay", "controls"]), + "video": frozenset(["autoplay", "controls"]), + "script": frozenset(["defer", "async"]), + "details": frozenset(["open"]), + "datagrid": frozenset(["multiple", "disabled"]), + "command": frozenset(["hidden", "disabled", "checked", "default"]), + "hr": frozenset(["noshade"]), + 
"menu": frozenset(["autosubmit"]), + "fieldset": frozenset(["disabled", "readonly"]), + "option": frozenset(["disabled", "readonly", "selected"]), + "optgroup": frozenset(["disabled", "readonly"]), + "button": frozenset(["disabled", "autofocus"]), + "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]), + "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]), + "output": frozenset(["disabled", "readonly"]), + "iframe": frozenset(["seamless"]), +} + +# entitiesWindows1252 has to be _ordered_ and needs to have an index. It +# therefore can't be a frozenset. +entitiesWindows1252 = ( + 8364, # 0x80 0x20AC EURO SIGN + 65533, # 0x81 UNDEFINED + 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK + 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK + 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK + 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS + 8224, # 0x86 0x2020 DAGGER + 8225, # 0x87 0x2021 DOUBLE DAGGER + 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT + 8240, # 0x89 0x2030 PER MILLE SIGN + 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON + 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK + 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE + 65533, # 0x8D UNDEFINED + 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON + 65533, # 0x8F UNDEFINED + 65533, # 0x90 UNDEFINED + 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK + 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK + 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK + 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK + 8226, # 0x95 0x2022 BULLET + 8211, # 0x96 0x2013 EN DASH + 8212, # 0x97 0x2014 EM DASH + 732, # 0x98 0x02DC SMALL TILDE + 8482, # 0x99 0x2122 TRADE MARK SIGN + 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON + 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK + 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE + 65533, # 0x9D UNDEFINED + 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON + 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS +) + +xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;']) + +entities = { + "AElig": "\xc6", + "AElig;": "\xc6", + "AMP": "&", + "AMP;": "&", + "Aacute": "\xc1", + "Aacute;": "\xc1", + "Abreve;": "\u0102", + "Acirc": "\xc2", + "Acirc;": "\xc2", + "Acy;": "\u0410", + "Afr;": "\U0001d504", + "Agrave": "\xc0", + "Agrave;": "\xc0", + "Alpha;": "\u0391", + "Amacr;": "\u0100", + "And;": "\u2a53", + "Aogon;": "\u0104", + "Aopf;": "\U0001d538", + "ApplyFunction;": "\u2061", + "Aring": "\xc5", + "Aring;": "\xc5", + "Ascr;": "\U0001d49c", + "Assign;": "\u2254", + "Atilde": "\xc3", + "Atilde;": "\xc3", + "Auml": "\xc4", + "Auml;": "\xc4", + "Backslash;": "\u2216", + "Barv;": "\u2ae7", + "Barwed;": "\u2306", + "Bcy;": "\u0411", + "Because;": "\u2235", + "Bernoullis;": "\u212c", + "Beta;": "\u0392", + "Bfr;": "\U0001d505", + "Bopf;": "\U0001d539", + "Breve;": "\u02d8", + "Bscr;": "\u212c", + "Bumpeq;": "\u224e", + "CHcy;": "\u0427", + "COPY": "\xa9", + "COPY;": "\xa9", + "Cacute;": "\u0106", + "Cap;": "\u22d2", + "CapitalDifferentialD;": "\u2145", + "Cayleys;": "\u212d", + "Ccaron;": "\u010c", + "Ccedil": "\xc7", + "Ccedil;": "\xc7", + "Ccirc;": "\u0108", + "Cconint;": "\u2230", + "Cdot;": "\u010a", + "Cedilla;": "\xb8", + "CenterDot;": "\xb7", + "Cfr;": "\u212d", + "Chi;": "\u03a7", + "CircleDot;": "\u2299", + "CircleMinus;": "\u2296", + "CirclePlus;": "\u2295", + "CircleTimes;": "\u2297", + "ClockwiseContourIntegral;": "\u2232", + "CloseCurlyDoubleQuote;": "\u201d", + "CloseCurlyQuote;": "\u2019", + "Colon;": "\u2237", 
+ "Colone;": "\u2a74", + "Congruent;": "\u2261", + "Conint;": "\u222f", + "ContourIntegral;": "\u222e", + "Copf;": "\u2102", + "Coproduct;": "\u2210", + "CounterClockwiseContourIntegral;": "\u2233", + "Cross;": "\u2a2f", + "Cscr;": "\U0001d49e", + "Cup;": "\u22d3", + "CupCap;": "\u224d", + "DD;": "\u2145", + "DDotrahd;": "\u2911", + "DJcy;": "\u0402", + "DScy;": "\u0405", + "DZcy;": "\u040f", + "Dagger;": "\u2021", + "Darr;": "\u21a1", + "Dashv;": "\u2ae4", + "Dcaron;": "\u010e", + "Dcy;": "\u0414", + "Del;": "\u2207", + "Delta;": "\u0394", + "Dfr;": "\U0001d507", + "DiacriticalAcute;": "\xb4", + "DiacriticalDot;": "\u02d9", + "DiacriticalDoubleAcute;": "\u02dd", + "DiacriticalGrave;": "`", + "DiacriticalTilde;": "\u02dc", + "Diamond;": "\u22c4", + "DifferentialD;": "\u2146", + "Dopf;": "\U0001d53b", + "Dot;": "\xa8", + "DotDot;": "\u20dc", + "DotEqual;": "\u2250", + "DoubleContourIntegral;": "\u222f", + "DoubleDot;": "\xa8", + "DoubleDownArrow;": "\u21d3", + "DoubleLeftArrow;": "\u21d0", + "DoubleLeftRightArrow;": "\u21d4", + "DoubleLeftTee;": "\u2ae4", + "DoubleLongLeftArrow;": "\u27f8", + "DoubleLongLeftRightArrow;": "\u27fa", + "DoubleLongRightArrow;": "\u27f9", + "DoubleRightArrow;": "\u21d2", + "DoubleRightTee;": "\u22a8", + "DoubleUpArrow;": "\u21d1", + "DoubleUpDownArrow;": "\u21d5", + "DoubleVerticalBar;": "\u2225", + "DownArrow;": "\u2193", + "DownArrowBar;": "\u2913", + "DownArrowUpArrow;": "\u21f5", + "DownBreve;": "\u0311", + "DownLeftRightVector;": "\u2950", + "DownLeftTeeVector;": "\u295e", + "DownLeftVector;": "\u21bd", + "DownLeftVectorBar;": "\u2956", + "DownRightTeeVector;": "\u295f", + "DownRightVector;": "\u21c1", + "DownRightVectorBar;": "\u2957", + "DownTee;": "\u22a4", + "DownTeeArrow;": "\u21a7", + "Downarrow;": "\u21d3", + "Dscr;": "\U0001d49f", + "Dstrok;": "\u0110", + "ENG;": "\u014a", + "ETH": "\xd0", + "ETH;": "\xd0", + "Eacute": "\xc9", + "Eacute;": "\xc9", + "Ecaron;": "\u011a", + "Ecirc": "\xca", + "Ecirc;": "\xca", + "Ecy;": "\u042d", + "Edot;": "\u0116", + "Efr;": "\U0001d508", + "Egrave": "\xc8", + "Egrave;": "\xc8", + "Element;": "\u2208", + "Emacr;": "\u0112", + "EmptySmallSquare;": "\u25fb", + "EmptyVerySmallSquare;": "\u25ab", + "Eogon;": "\u0118", + "Eopf;": "\U0001d53c", + "Epsilon;": "\u0395", + "Equal;": "\u2a75", + "EqualTilde;": "\u2242", + "Equilibrium;": "\u21cc", + "Escr;": "\u2130", + "Esim;": "\u2a73", + "Eta;": "\u0397", + "Euml": "\xcb", + "Euml;": "\xcb", + "Exists;": "\u2203", + "ExponentialE;": "\u2147", + "Fcy;": "\u0424", + "Ffr;": "\U0001d509", + "FilledSmallSquare;": "\u25fc", + "FilledVerySmallSquare;": "\u25aa", + "Fopf;": "\U0001d53d", + "ForAll;": "\u2200", + "Fouriertrf;": "\u2131", + "Fscr;": "\u2131", + "GJcy;": "\u0403", + "GT": ">", + "GT;": ">", + "Gamma;": "\u0393", + "Gammad;": "\u03dc", + "Gbreve;": "\u011e", + "Gcedil;": "\u0122", + "Gcirc;": "\u011c", + "Gcy;": "\u0413", + "Gdot;": "\u0120", + "Gfr;": "\U0001d50a", + "Gg;": "\u22d9", + "Gopf;": "\U0001d53e", + "GreaterEqual;": "\u2265", + "GreaterEqualLess;": "\u22db", + "GreaterFullEqual;": "\u2267", + "GreaterGreater;": "\u2aa2", + "GreaterLess;": "\u2277", + "GreaterSlantEqual;": "\u2a7e", + "GreaterTilde;": "\u2273", + "Gscr;": "\U0001d4a2", + "Gt;": "\u226b", + "HARDcy;": "\u042a", + "Hacek;": "\u02c7", + "Hat;": "^", + "Hcirc;": "\u0124", + "Hfr;": "\u210c", + "HilbertSpace;": "\u210b", + "Hopf;": "\u210d", + "HorizontalLine;": "\u2500", + "Hscr;": "\u210b", + "Hstrok;": "\u0126", + "HumpDownHump;": "\u224e", + "HumpEqual;": "\u224f", + "IEcy;": "\u0415", + 
"IJlig;": "\u0132", + "IOcy;": "\u0401", + "Iacute": "\xcd", + "Iacute;": "\xcd", + "Icirc": "\xce", + "Icirc;": "\xce", + "Icy;": "\u0418", + "Idot;": "\u0130", + "Ifr;": "\u2111", + "Igrave": "\xcc", + "Igrave;": "\xcc", + "Im;": "\u2111", + "Imacr;": "\u012a", + "ImaginaryI;": "\u2148", + "Implies;": "\u21d2", + "Int;": "\u222c", + "Integral;": "\u222b", + "Intersection;": "\u22c2", + "InvisibleComma;": "\u2063", + "InvisibleTimes;": "\u2062", + "Iogon;": "\u012e", + "Iopf;": "\U0001d540", + "Iota;": "\u0399", + "Iscr;": "\u2110", + "Itilde;": "\u0128", + "Iukcy;": "\u0406", + "Iuml": "\xcf", + "Iuml;": "\xcf", + "Jcirc;": "\u0134", + "Jcy;": "\u0419", + "Jfr;": "\U0001d50d", + "Jopf;": "\U0001d541", + "Jscr;": "\U0001d4a5", + "Jsercy;": "\u0408", + "Jukcy;": "\u0404", + "KHcy;": "\u0425", + "KJcy;": "\u040c", + "Kappa;": "\u039a", + "Kcedil;": "\u0136", + "Kcy;": "\u041a", + "Kfr;": "\U0001d50e", + "Kopf;": "\U0001d542", + "Kscr;": "\U0001d4a6", + "LJcy;": "\u0409", + "LT": "<", + "LT;": "<", + "Lacute;": "\u0139", + "Lambda;": "\u039b", + "Lang;": "\u27ea", + "Laplacetrf;": "\u2112", + "Larr;": "\u219e", + "Lcaron;": "\u013d", + "Lcedil;": "\u013b", + "Lcy;": "\u041b", + "LeftAngleBracket;": "\u27e8", + "LeftArrow;": "\u2190", + "LeftArrowBar;": "\u21e4", + "LeftArrowRightArrow;": "\u21c6", + "LeftCeiling;": "\u2308", + "LeftDoubleBracket;": "\u27e6", + "LeftDownTeeVector;": "\u2961", + "LeftDownVector;": "\u21c3", + "LeftDownVectorBar;": "\u2959", + "LeftFloor;": "\u230a", + "LeftRightArrow;": "\u2194", + "LeftRightVector;": "\u294e", + "LeftTee;": "\u22a3", + "LeftTeeArrow;": "\u21a4", + "LeftTeeVector;": "\u295a", + "LeftTriangle;": "\u22b2", + "LeftTriangleBar;": "\u29cf", + "LeftTriangleEqual;": "\u22b4", + "LeftUpDownVector;": "\u2951", + "LeftUpTeeVector;": "\u2960", + "LeftUpVector;": "\u21bf", + "LeftUpVectorBar;": "\u2958", + "LeftVector;": "\u21bc", + "LeftVectorBar;": "\u2952", + "Leftarrow;": "\u21d0", + "Leftrightarrow;": "\u21d4", + "LessEqualGreater;": "\u22da", + "LessFullEqual;": "\u2266", + "LessGreater;": "\u2276", + "LessLess;": "\u2aa1", + "LessSlantEqual;": "\u2a7d", + "LessTilde;": "\u2272", + "Lfr;": "\U0001d50f", + "Ll;": "\u22d8", + "Lleftarrow;": "\u21da", + "Lmidot;": "\u013f", + "LongLeftArrow;": "\u27f5", + "LongLeftRightArrow;": "\u27f7", + "LongRightArrow;": "\u27f6", + "Longleftarrow;": "\u27f8", + "Longleftrightarrow;": "\u27fa", + "Longrightarrow;": "\u27f9", + "Lopf;": "\U0001d543", + "LowerLeftArrow;": "\u2199", + "LowerRightArrow;": "\u2198", + "Lscr;": "\u2112", + "Lsh;": "\u21b0", + "Lstrok;": "\u0141", + "Lt;": "\u226a", + "Map;": "\u2905", + "Mcy;": "\u041c", + "MediumSpace;": "\u205f", + "Mellintrf;": "\u2133", + "Mfr;": "\U0001d510", + "MinusPlus;": "\u2213", + "Mopf;": "\U0001d544", + "Mscr;": "\u2133", + "Mu;": "\u039c", + "NJcy;": "\u040a", + "Nacute;": "\u0143", + "Ncaron;": "\u0147", + "Ncedil;": "\u0145", + "Ncy;": "\u041d", + "NegativeMediumSpace;": "\u200b", + "NegativeThickSpace;": "\u200b", + "NegativeThinSpace;": "\u200b", + "NegativeVeryThinSpace;": "\u200b", + "NestedGreaterGreater;": "\u226b", + "NestedLessLess;": "\u226a", + "NewLine;": "\n", + "Nfr;": "\U0001d511", + "NoBreak;": "\u2060", + "NonBreakingSpace;": "\xa0", + "Nopf;": "\u2115", + "Not;": "\u2aec", + "NotCongruent;": "\u2262", + "NotCupCap;": "\u226d", + "NotDoubleVerticalBar;": "\u2226", + "NotElement;": "\u2209", + "NotEqual;": "\u2260", + "NotEqualTilde;": "\u2242\u0338", + "NotExists;": "\u2204", + "NotGreater;": "\u226f", + "NotGreaterEqual;": "\u2271", + 
"NotGreaterFullEqual;": "\u2267\u0338", + "NotGreaterGreater;": "\u226b\u0338", + "NotGreaterLess;": "\u2279", + "NotGreaterSlantEqual;": "\u2a7e\u0338", + "NotGreaterTilde;": "\u2275", + "NotHumpDownHump;": "\u224e\u0338", + "NotHumpEqual;": "\u224f\u0338", + "NotLeftTriangle;": "\u22ea", + "NotLeftTriangleBar;": "\u29cf\u0338", + "NotLeftTriangleEqual;": "\u22ec", + "NotLess;": "\u226e", + "NotLessEqual;": "\u2270", + "NotLessGreater;": "\u2278", + "NotLessLess;": "\u226a\u0338", + "NotLessSlantEqual;": "\u2a7d\u0338", + "NotLessTilde;": "\u2274", + "NotNestedGreaterGreater;": "\u2aa2\u0338", + "NotNestedLessLess;": "\u2aa1\u0338", + "NotPrecedes;": "\u2280", + "NotPrecedesEqual;": "\u2aaf\u0338", + "NotPrecedesSlantEqual;": "\u22e0", + "NotReverseElement;": "\u220c", + "NotRightTriangle;": "\u22eb", + "NotRightTriangleBar;": "\u29d0\u0338", + "NotRightTriangleEqual;": "\u22ed", + "NotSquareSubset;": "\u228f\u0338", + "NotSquareSubsetEqual;": "\u22e2", + "NotSquareSuperset;": "\u2290\u0338", + "NotSquareSupersetEqual;": "\u22e3", + "NotSubset;": "\u2282\u20d2", + "NotSubsetEqual;": "\u2288", + "NotSucceeds;": "\u2281", + "NotSucceedsEqual;": "\u2ab0\u0338", + "NotSucceedsSlantEqual;": "\u22e1", + "NotSucceedsTilde;": "\u227f\u0338", + "NotSuperset;": "\u2283\u20d2", + "NotSupersetEqual;": "\u2289", + "NotTilde;": "\u2241", + "NotTildeEqual;": "\u2244", + "NotTildeFullEqual;": "\u2247", + "NotTildeTilde;": "\u2249", + "NotVerticalBar;": "\u2224", + "Nscr;": "\U0001d4a9", + "Ntilde": "\xd1", + "Ntilde;": "\xd1", + "Nu;": "\u039d", + "OElig;": "\u0152", + "Oacute": "\xd3", + "Oacute;": "\xd3", + "Ocirc": "\xd4", + "Ocirc;": "\xd4", + "Ocy;": "\u041e", + "Odblac;": "\u0150", + "Ofr;": "\U0001d512", + "Ograve": "\xd2", + "Ograve;": "\xd2", + "Omacr;": "\u014c", + "Omega;": "\u03a9", + "Omicron;": "\u039f", + "Oopf;": "\U0001d546", + "OpenCurlyDoubleQuote;": "\u201c", + "OpenCurlyQuote;": "\u2018", + "Or;": "\u2a54", + "Oscr;": "\U0001d4aa", + "Oslash": "\xd8", + "Oslash;": "\xd8", + "Otilde": "\xd5", + "Otilde;": "\xd5", + "Otimes;": "\u2a37", + "Ouml": "\xd6", + "Ouml;": "\xd6", + "OverBar;": "\u203e", + "OverBrace;": "\u23de", + "OverBracket;": "\u23b4", + "OverParenthesis;": "\u23dc", + "PartialD;": "\u2202", + "Pcy;": "\u041f", + "Pfr;": "\U0001d513", + "Phi;": "\u03a6", + "Pi;": "\u03a0", + "PlusMinus;": "\xb1", + "Poincareplane;": "\u210c", + "Popf;": "\u2119", + "Pr;": "\u2abb", + "Precedes;": "\u227a", + "PrecedesEqual;": "\u2aaf", + "PrecedesSlantEqual;": "\u227c", + "PrecedesTilde;": "\u227e", + "Prime;": "\u2033", + "Product;": "\u220f", + "Proportion;": "\u2237", + "Proportional;": "\u221d", + "Pscr;": "\U0001d4ab", + "Psi;": "\u03a8", + "QUOT": "\"", + "QUOT;": "\"", + "Qfr;": "\U0001d514", + "Qopf;": "\u211a", + "Qscr;": "\U0001d4ac", + "RBarr;": "\u2910", + "REG": "\xae", + "REG;": "\xae", + "Racute;": "\u0154", + "Rang;": "\u27eb", + "Rarr;": "\u21a0", + "Rarrtl;": "\u2916", + "Rcaron;": "\u0158", + "Rcedil;": "\u0156", + "Rcy;": "\u0420", + "Re;": "\u211c", + "ReverseElement;": "\u220b", + "ReverseEquilibrium;": "\u21cb", + "ReverseUpEquilibrium;": "\u296f", + "Rfr;": "\u211c", + "Rho;": "\u03a1", + "RightAngleBracket;": "\u27e9", + "RightArrow;": "\u2192", + "RightArrowBar;": "\u21e5", + "RightArrowLeftArrow;": "\u21c4", + "RightCeiling;": "\u2309", + "RightDoubleBracket;": "\u27e7", + "RightDownTeeVector;": "\u295d", + "RightDownVector;": "\u21c2", + "RightDownVectorBar;": "\u2955", + "RightFloor;": "\u230b", + "RightTee;": "\u22a2", + "RightTeeArrow;": "\u21a6", + 
"RightTeeVector;": "\u295b", + "RightTriangle;": "\u22b3", + "RightTriangleBar;": "\u29d0", + "RightTriangleEqual;": "\u22b5", + "RightUpDownVector;": "\u294f", + "RightUpTeeVector;": "\u295c", + "RightUpVector;": "\u21be", + "RightUpVectorBar;": "\u2954", + "RightVector;": "\u21c0", + "RightVectorBar;": "\u2953", + "Rightarrow;": "\u21d2", + "Ropf;": "\u211d", + "RoundImplies;": "\u2970", + "Rrightarrow;": "\u21db", + "Rscr;": "\u211b", + "Rsh;": "\u21b1", + "RuleDelayed;": "\u29f4", + "SHCHcy;": "\u0429", + "SHcy;": "\u0428", + "SOFTcy;": "\u042c", + "Sacute;": "\u015a", + "Sc;": "\u2abc", + "Scaron;": "\u0160", + "Scedil;": "\u015e", + "Scirc;": "\u015c", + "Scy;": "\u0421", + "Sfr;": "\U0001d516", + "ShortDownArrow;": "\u2193", + "ShortLeftArrow;": "\u2190", + "ShortRightArrow;": "\u2192", + "ShortUpArrow;": "\u2191", + "Sigma;": "\u03a3", + "SmallCircle;": "\u2218", + "Sopf;": "\U0001d54a", + "Sqrt;": "\u221a", + "Square;": "\u25a1", + "SquareIntersection;": "\u2293", + "SquareSubset;": "\u228f", + "SquareSubsetEqual;": "\u2291", + "SquareSuperset;": "\u2290", + "SquareSupersetEqual;": "\u2292", + "SquareUnion;": "\u2294", + "Sscr;": "\U0001d4ae", + "Star;": "\u22c6", + "Sub;": "\u22d0", + "Subset;": "\u22d0", + "SubsetEqual;": "\u2286", + "Succeeds;": "\u227b", + "SucceedsEqual;": "\u2ab0", + "SucceedsSlantEqual;": "\u227d", + "SucceedsTilde;": "\u227f", + "SuchThat;": "\u220b", + "Sum;": "\u2211", + "Sup;": "\u22d1", + "Superset;": "\u2283", + "SupersetEqual;": "\u2287", + "Supset;": "\u22d1", + "THORN": "\xde", + "THORN;": "\xde", + "TRADE;": "\u2122", + "TSHcy;": "\u040b", + "TScy;": "\u0426", + "Tab;": "\t", + "Tau;": "\u03a4", + "Tcaron;": "\u0164", + "Tcedil;": "\u0162", + "Tcy;": "\u0422", + "Tfr;": "\U0001d517", + "Therefore;": "\u2234", + "Theta;": "\u0398", + "ThickSpace;": "\u205f\u200a", + "ThinSpace;": "\u2009", + "Tilde;": "\u223c", + "TildeEqual;": "\u2243", + "TildeFullEqual;": "\u2245", + "TildeTilde;": "\u2248", + "Topf;": "\U0001d54b", + "TripleDot;": "\u20db", + "Tscr;": "\U0001d4af", + "Tstrok;": "\u0166", + "Uacute": "\xda", + "Uacute;": "\xda", + "Uarr;": "\u219f", + "Uarrocir;": "\u2949", + "Ubrcy;": "\u040e", + "Ubreve;": "\u016c", + "Ucirc": "\xdb", + "Ucirc;": "\xdb", + "Ucy;": "\u0423", + "Udblac;": "\u0170", + "Ufr;": "\U0001d518", + "Ugrave": "\xd9", + "Ugrave;": "\xd9", + "Umacr;": "\u016a", + "UnderBar;": "_", + "UnderBrace;": "\u23df", + "UnderBracket;": "\u23b5", + "UnderParenthesis;": "\u23dd", + "Union;": "\u22c3", + "UnionPlus;": "\u228e", + "Uogon;": "\u0172", + "Uopf;": "\U0001d54c", + "UpArrow;": "\u2191", + "UpArrowBar;": "\u2912", + "UpArrowDownArrow;": "\u21c5", + "UpDownArrow;": "\u2195", + "UpEquilibrium;": "\u296e", + "UpTee;": "\u22a5", + "UpTeeArrow;": "\u21a5", + "Uparrow;": "\u21d1", + "Updownarrow;": "\u21d5", + "UpperLeftArrow;": "\u2196", + "UpperRightArrow;": "\u2197", + "Upsi;": "\u03d2", + "Upsilon;": "\u03a5", + "Uring;": "\u016e", + "Uscr;": "\U0001d4b0", + "Utilde;": "\u0168", + "Uuml": "\xdc", + "Uuml;": "\xdc", + "VDash;": "\u22ab", + "Vbar;": "\u2aeb", + "Vcy;": "\u0412", + "Vdash;": "\u22a9", + "Vdashl;": "\u2ae6", + "Vee;": "\u22c1", + "Verbar;": "\u2016", + "Vert;": "\u2016", + "VerticalBar;": "\u2223", + "VerticalLine;": "|", + "VerticalSeparator;": "\u2758", + "VerticalTilde;": "\u2240", + "VeryThinSpace;": "\u200a", + "Vfr;": "\U0001d519", + "Vopf;": "\U0001d54d", + "Vscr;": "\U0001d4b1", + "Vvdash;": "\u22aa", + "Wcirc;": "\u0174", + "Wedge;": "\u22c0", + "Wfr;": "\U0001d51a", + "Wopf;": "\U0001d54e", + "Wscr;": 
"\U0001d4b2", + "Xfr;": "\U0001d51b", + "Xi;": "\u039e", + "Xopf;": "\U0001d54f", + "Xscr;": "\U0001d4b3", + "YAcy;": "\u042f", + "YIcy;": "\u0407", + "YUcy;": "\u042e", + "Yacute": "\xdd", + "Yacute;": "\xdd", + "Ycirc;": "\u0176", + "Ycy;": "\u042b", + "Yfr;": "\U0001d51c", + "Yopf;": "\U0001d550", + "Yscr;": "\U0001d4b4", + "Yuml;": "\u0178", + "ZHcy;": "\u0416", + "Zacute;": "\u0179", + "Zcaron;": "\u017d", + "Zcy;": "\u0417", + "Zdot;": "\u017b", + "ZeroWidthSpace;": "\u200b", + "Zeta;": "\u0396", + "Zfr;": "\u2128", + "Zopf;": "\u2124", + "Zscr;": "\U0001d4b5", + "aacute": "\xe1", + "aacute;": "\xe1", + "abreve;": "\u0103", + "ac;": "\u223e", + "acE;": "\u223e\u0333", + "acd;": "\u223f", + "acirc": "\xe2", + "acirc;": "\xe2", + "acute": "\xb4", + "acute;": "\xb4", + "acy;": "\u0430", + "aelig": "\xe6", + "aelig;": "\xe6", + "af;": "\u2061", + "afr;": "\U0001d51e", + "agrave": "\xe0", + "agrave;": "\xe0", + "alefsym;": "\u2135", + "aleph;": "\u2135", + "alpha;": "\u03b1", + "amacr;": "\u0101", + "amalg;": "\u2a3f", + "amp": "&", + "amp;": "&", + "and;": "\u2227", + "andand;": "\u2a55", + "andd;": "\u2a5c", + "andslope;": "\u2a58", + "andv;": "\u2a5a", + "ang;": "\u2220", + "ange;": "\u29a4", + "angle;": "\u2220", + "angmsd;": "\u2221", + "angmsdaa;": "\u29a8", + "angmsdab;": "\u29a9", + "angmsdac;": "\u29aa", + "angmsdad;": "\u29ab", + "angmsdae;": "\u29ac", + "angmsdaf;": "\u29ad", + "angmsdag;": "\u29ae", + "angmsdah;": "\u29af", + "angrt;": "\u221f", + "angrtvb;": "\u22be", + "angrtvbd;": "\u299d", + "angsph;": "\u2222", + "angst;": "\xc5", + "angzarr;": "\u237c", + "aogon;": "\u0105", + "aopf;": "\U0001d552", + "ap;": "\u2248", + "apE;": "\u2a70", + "apacir;": "\u2a6f", + "ape;": "\u224a", + "apid;": "\u224b", + "apos;": "'", + "approx;": "\u2248", + "approxeq;": "\u224a", + "aring": "\xe5", + "aring;": "\xe5", + "ascr;": "\U0001d4b6", + "ast;": "*", + "asymp;": "\u2248", + "asympeq;": "\u224d", + "atilde": "\xe3", + "atilde;": "\xe3", + "auml": "\xe4", + "auml;": "\xe4", + "awconint;": "\u2233", + "awint;": "\u2a11", + "bNot;": "\u2aed", + "backcong;": "\u224c", + "backepsilon;": "\u03f6", + "backprime;": "\u2035", + "backsim;": "\u223d", + "backsimeq;": "\u22cd", + "barvee;": "\u22bd", + "barwed;": "\u2305", + "barwedge;": "\u2305", + "bbrk;": "\u23b5", + "bbrktbrk;": "\u23b6", + "bcong;": "\u224c", + "bcy;": "\u0431", + "bdquo;": "\u201e", + "becaus;": "\u2235", + "because;": "\u2235", + "bemptyv;": "\u29b0", + "bepsi;": "\u03f6", + "bernou;": "\u212c", + "beta;": "\u03b2", + "beth;": "\u2136", + "between;": "\u226c", + "bfr;": "\U0001d51f", + "bigcap;": "\u22c2", + "bigcirc;": "\u25ef", + "bigcup;": "\u22c3", + "bigodot;": "\u2a00", + "bigoplus;": "\u2a01", + "bigotimes;": "\u2a02", + "bigsqcup;": "\u2a06", + "bigstar;": "\u2605", + "bigtriangledown;": "\u25bd", + "bigtriangleup;": "\u25b3", + "biguplus;": "\u2a04", + "bigvee;": "\u22c1", + "bigwedge;": "\u22c0", + "bkarow;": "\u290d", + "blacklozenge;": "\u29eb", + "blacksquare;": "\u25aa", + "blacktriangle;": "\u25b4", + "blacktriangledown;": "\u25be", + "blacktriangleleft;": "\u25c2", + "blacktriangleright;": "\u25b8", + "blank;": "\u2423", + "blk12;": "\u2592", + "blk14;": "\u2591", + "blk34;": "\u2593", + "block;": "\u2588", + "bne;": "=\u20e5", + "bnequiv;": "\u2261\u20e5", + "bnot;": "\u2310", + "bopf;": "\U0001d553", + "bot;": "\u22a5", + "bottom;": "\u22a5", + "bowtie;": "\u22c8", + "boxDL;": "\u2557", + "boxDR;": "\u2554", + "boxDl;": "\u2556", + "boxDr;": "\u2553", + "boxH;": "\u2550", + "boxHD;": "\u2566", + 
"boxHU;": "\u2569", + "boxHd;": "\u2564", + "boxHu;": "\u2567", + "boxUL;": "\u255d", + "boxUR;": "\u255a", + "boxUl;": "\u255c", + "boxUr;": "\u2559", + "boxV;": "\u2551", + "boxVH;": "\u256c", + "boxVL;": "\u2563", + "boxVR;": "\u2560", + "boxVh;": "\u256b", + "boxVl;": "\u2562", + "boxVr;": "\u255f", + "boxbox;": "\u29c9", + "boxdL;": "\u2555", + "boxdR;": "\u2552", + "boxdl;": "\u2510", + "boxdr;": "\u250c", + "boxh;": "\u2500", + "boxhD;": "\u2565", + "boxhU;": "\u2568", + "boxhd;": "\u252c", + "boxhu;": "\u2534", + "boxminus;": "\u229f", + "boxplus;": "\u229e", + "boxtimes;": "\u22a0", + "boxuL;": "\u255b", + "boxuR;": "\u2558", + "boxul;": "\u2518", + "boxur;": "\u2514", + "boxv;": "\u2502", + "boxvH;": "\u256a", + "boxvL;": "\u2561", + "boxvR;": "\u255e", + "boxvh;": "\u253c", + "boxvl;": "\u2524", + "boxvr;": "\u251c", + "bprime;": "\u2035", + "breve;": "\u02d8", + "brvbar": "\xa6", + "brvbar;": "\xa6", + "bscr;": "\U0001d4b7", + "bsemi;": "\u204f", + "bsim;": "\u223d", + "bsime;": "\u22cd", + "bsol;": "\\", + "bsolb;": "\u29c5", + "bsolhsub;": "\u27c8", + "bull;": "\u2022", + "bullet;": "\u2022", + "bump;": "\u224e", + "bumpE;": "\u2aae", + "bumpe;": "\u224f", + "bumpeq;": "\u224f", + "cacute;": "\u0107", + "cap;": "\u2229", + "capand;": "\u2a44", + "capbrcup;": "\u2a49", + "capcap;": "\u2a4b", + "capcup;": "\u2a47", + "capdot;": "\u2a40", + "caps;": "\u2229\ufe00", + "caret;": "\u2041", + "caron;": "\u02c7", + "ccaps;": "\u2a4d", + "ccaron;": "\u010d", + "ccedil": "\xe7", + "ccedil;": "\xe7", + "ccirc;": "\u0109", + "ccups;": "\u2a4c", + "ccupssm;": "\u2a50", + "cdot;": "\u010b", + "cedil": "\xb8", + "cedil;": "\xb8", + "cemptyv;": "\u29b2", + "cent": "\xa2", + "cent;": "\xa2", + "centerdot;": "\xb7", + "cfr;": "\U0001d520", + "chcy;": "\u0447", + "check;": "\u2713", + "checkmark;": "\u2713", + "chi;": "\u03c7", + "cir;": "\u25cb", + "cirE;": "\u29c3", + "circ;": "\u02c6", + "circeq;": "\u2257", + "circlearrowleft;": "\u21ba", + "circlearrowright;": "\u21bb", + "circledR;": "\xae", + "circledS;": "\u24c8", + "circledast;": "\u229b", + "circledcirc;": "\u229a", + "circleddash;": "\u229d", + "cire;": "\u2257", + "cirfnint;": "\u2a10", + "cirmid;": "\u2aef", + "cirscir;": "\u29c2", + "clubs;": "\u2663", + "clubsuit;": "\u2663", + "colon;": ":", + "colone;": "\u2254", + "coloneq;": "\u2254", + "comma;": ",", + "commat;": "@", + "comp;": "\u2201", + "compfn;": "\u2218", + "complement;": "\u2201", + "complexes;": "\u2102", + "cong;": "\u2245", + "congdot;": "\u2a6d", + "conint;": "\u222e", + "copf;": "\U0001d554", + "coprod;": "\u2210", + "copy": "\xa9", + "copy;": "\xa9", + "copysr;": "\u2117", + "crarr;": "\u21b5", + "cross;": "\u2717", + "cscr;": "\U0001d4b8", + "csub;": "\u2acf", + "csube;": "\u2ad1", + "csup;": "\u2ad0", + "csupe;": "\u2ad2", + "ctdot;": "\u22ef", + "cudarrl;": "\u2938", + "cudarrr;": "\u2935", + "cuepr;": "\u22de", + "cuesc;": "\u22df", + "cularr;": "\u21b6", + "cularrp;": "\u293d", + "cup;": "\u222a", + "cupbrcap;": "\u2a48", + "cupcap;": "\u2a46", + "cupcup;": "\u2a4a", + "cupdot;": "\u228d", + "cupor;": "\u2a45", + "cups;": "\u222a\ufe00", + "curarr;": "\u21b7", + "curarrm;": "\u293c", + "curlyeqprec;": "\u22de", + "curlyeqsucc;": "\u22df", + "curlyvee;": "\u22ce", + "curlywedge;": "\u22cf", + "curren": "\xa4", + "curren;": "\xa4", + "curvearrowleft;": "\u21b6", + "curvearrowright;": "\u21b7", + "cuvee;": "\u22ce", + "cuwed;": "\u22cf", + "cwconint;": "\u2232", + "cwint;": "\u2231", + "cylcty;": "\u232d", + "dArr;": "\u21d3", + "dHar;": "\u2965", + "dagger;": 
"\u2020", + "daleth;": "\u2138", + "darr;": "\u2193", + "dash;": "\u2010", + "dashv;": "\u22a3", + "dbkarow;": "\u290f", + "dblac;": "\u02dd", + "dcaron;": "\u010f", + "dcy;": "\u0434", + "dd;": "\u2146", + "ddagger;": "\u2021", + "ddarr;": "\u21ca", + "ddotseq;": "\u2a77", + "deg": "\xb0", + "deg;": "\xb0", + "delta;": "\u03b4", + "demptyv;": "\u29b1", + "dfisht;": "\u297f", + "dfr;": "\U0001d521", + "dharl;": "\u21c3", + "dharr;": "\u21c2", + "diam;": "\u22c4", + "diamond;": "\u22c4", + "diamondsuit;": "\u2666", + "diams;": "\u2666", + "die;": "\xa8", + "digamma;": "\u03dd", + "disin;": "\u22f2", + "div;": "\xf7", + "divide": "\xf7", + "divide;": "\xf7", + "divideontimes;": "\u22c7", + "divonx;": "\u22c7", + "djcy;": "\u0452", + "dlcorn;": "\u231e", + "dlcrop;": "\u230d", + "dollar;": "$", + "dopf;": "\U0001d555", + "dot;": "\u02d9", + "doteq;": "\u2250", + "doteqdot;": "\u2251", + "dotminus;": "\u2238", + "dotplus;": "\u2214", + "dotsquare;": "\u22a1", + "doublebarwedge;": "\u2306", + "downarrow;": "\u2193", + "downdownarrows;": "\u21ca", + "downharpoonleft;": "\u21c3", + "downharpoonright;": "\u21c2", + "drbkarow;": "\u2910", + "drcorn;": "\u231f", + "drcrop;": "\u230c", + "dscr;": "\U0001d4b9", + "dscy;": "\u0455", + "dsol;": "\u29f6", + "dstrok;": "\u0111", + "dtdot;": "\u22f1", + "dtri;": "\u25bf", + "dtrif;": "\u25be", + "duarr;": "\u21f5", + "duhar;": "\u296f", + "dwangle;": "\u29a6", + "dzcy;": "\u045f", + "dzigrarr;": "\u27ff", + "eDDot;": "\u2a77", + "eDot;": "\u2251", + "eacute": "\xe9", + "eacute;": "\xe9", + "easter;": "\u2a6e", + "ecaron;": "\u011b", + "ecir;": "\u2256", + "ecirc": "\xea", + "ecirc;": "\xea", + "ecolon;": "\u2255", + "ecy;": "\u044d", + "edot;": "\u0117", + "ee;": "\u2147", + "efDot;": "\u2252", + "efr;": "\U0001d522", + "eg;": "\u2a9a", + "egrave": "\xe8", + "egrave;": "\xe8", + "egs;": "\u2a96", + "egsdot;": "\u2a98", + "el;": "\u2a99", + "elinters;": "\u23e7", + "ell;": "\u2113", + "els;": "\u2a95", + "elsdot;": "\u2a97", + "emacr;": "\u0113", + "empty;": "\u2205", + "emptyset;": "\u2205", + "emptyv;": "\u2205", + "emsp13;": "\u2004", + "emsp14;": "\u2005", + "emsp;": "\u2003", + "eng;": "\u014b", + "ensp;": "\u2002", + "eogon;": "\u0119", + "eopf;": "\U0001d556", + "epar;": "\u22d5", + "eparsl;": "\u29e3", + "eplus;": "\u2a71", + "epsi;": "\u03b5", + "epsilon;": "\u03b5", + "epsiv;": "\u03f5", + "eqcirc;": "\u2256", + "eqcolon;": "\u2255", + "eqsim;": "\u2242", + "eqslantgtr;": "\u2a96", + "eqslantless;": "\u2a95", + "equals;": "=", + "equest;": "\u225f", + "equiv;": "\u2261", + "equivDD;": "\u2a78", + "eqvparsl;": "\u29e5", + "erDot;": "\u2253", + "erarr;": "\u2971", + "escr;": "\u212f", + "esdot;": "\u2250", + "esim;": "\u2242", + "eta;": "\u03b7", + "eth": "\xf0", + "eth;": "\xf0", + "euml": "\xeb", + "euml;": "\xeb", + "euro;": "\u20ac", + "excl;": "!", + "exist;": "\u2203", + "expectation;": "\u2130", + "exponentiale;": "\u2147", + "fallingdotseq;": "\u2252", + "fcy;": "\u0444", + "female;": "\u2640", + "ffilig;": "\ufb03", + "fflig;": "\ufb00", + "ffllig;": "\ufb04", + "ffr;": "\U0001d523", + "filig;": "\ufb01", + "fjlig;": "fj", + "flat;": "\u266d", + "fllig;": "\ufb02", + "fltns;": "\u25b1", + "fnof;": "\u0192", + "fopf;": "\U0001d557", + "forall;": "\u2200", + "fork;": "\u22d4", + "forkv;": "\u2ad9", + "fpartint;": "\u2a0d", + "frac12": "\xbd", + "frac12;": "\xbd", + "frac13;": "\u2153", + "frac14": "\xbc", + "frac14;": "\xbc", + "frac15;": "\u2155", + "frac16;": "\u2159", + "frac18;": "\u215b", + "frac23;": "\u2154", + "frac25;": "\u2156", + 
"frac34": "\xbe", + "frac34;": "\xbe", + "frac35;": "\u2157", + "frac38;": "\u215c", + "frac45;": "\u2158", + "frac56;": "\u215a", + "frac58;": "\u215d", + "frac78;": "\u215e", + "frasl;": "\u2044", + "frown;": "\u2322", + "fscr;": "\U0001d4bb", + "gE;": "\u2267", + "gEl;": "\u2a8c", + "gacute;": "\u01f5", + "gamma;": "\u03b3", + "gammad;": "\u03dd", + "gap;": "\u2a86", + "gbreve;": "\u011f", + "gcirc;": "\u011d", + "gcy;": "\u0433", + "gdot;": "\u0121", + "ge;": "\u2265", + "gel;": "\u22db", + "geq;": "\u2265", + "geqq;": "\u2267", + "geqslant;": "\u2a7e", + "ges;": "\u2a7e", + "gescc;": "\u2aa9", + "gesdot;": "\u2a80", + "gesdoto;": "\u2a82", + "gesdotol;": "\u2a84", + "gesl;": "\u22db\ufe00", + "gesles;": "\u2a94", + "gfr;": "\U0001d524", + "gg;": "\u226b", + "ggg;": "\u22d9", + "gimel;": "\u2137", + "gjcy;": "\u0453", + "gl;": "\u2277", + "glE;": "\u2a92", + "gla;": "\u2aa5", + "glj;": "\u2aa4", + "gnE;": "\u2269", + "gnap;": "\u2a8a", + "gnapprox;": "\u2a8a", + "gne;": "\u2a88", + "gneq;": "\u2a88", + "gneqq;": "\u2269", + "gnsim;": "\u22e7", + "gopf;": "\U0001d558", + "grave;": "`", + "gscr;": "\u210a", + "gsim;": "\u2273", + "gsime;": "\u2a8e", + "gsiml;": "\u2a90", + "gt": ">", + "gt;": ">", + "gtcc;": "\u2aa7", + "gtcir;": "\u2a7a", + "gtdot;": "\u22d7", + "gtlPar;": "\u2995", + "gtquest;": "\u2a7c", + "gtrapprox;": "\u2a86", + "gtrarr;": "\u2978", + "gtrdot;": "\u22d7", + "gtreqless;": "\u22db", + "gtreqqless;": "\u2a8c", + "gtrless;": "\u2277", + "gtrsim;": "\u2273", + "gvertneqq;": "\u2269\ufe00", + "gvnE;": "\u2269\ufe00", + "hArr;": "\u21d4", + "hairsp;": "\u200a", + "half;": "\xbd", + "hamilt;": "\u210b", + "hardcy;": "\u044a", + "harr;": "\u2194", + "harrcir;": "\u2948", + "harrw;": "\u21ad", + "hbar;": "\u210f", + "hcirc;": "\u0125", + "hearts;": "\u2665", + "heartsuit;": "\u2665", + "hellip;": "\u2026", + "hercon;": "\u22b9", + "hfr;": "\U0001d525", + "hksearow;": "\u2925", + "hkswarow;": "\u2926", + "hoarr;": "\u21ff", + "homtht;": "\u223b", + "hookleftarrow;": "\u21a9", + "hookrightarrow;": "\u21aa", + "hopf;": "\U0001d559", + "horbar;": "\u2015", + "hscr;": "\U0001d4bd", + "hslash;": "\u210f", + "hstrok;": "\u0127", + "hybull;": "\u2043", + "hyphen;": "\u2010", + "iacute": "\xed", + "iacute;": "\xed", + "ic;": "\u2063", + "icirc": "\xee", + "icirc;": "\xee", + "icy;": "\u0438", + "iecy;": "\u0435", + "iexcl": "\xa1", + "iexcl;": "\xa1", + "iff;": "\u21d4", + "ifr;": "\U0001d526", + "igrave": "\xec", + "igrave;": "\xec", + "ii;": "\u2148", + "iiiint;": "\u2a0c", + "iiint;": "\u222d", + "iinfin;": "\u29dc", + "iiota;": "\u2129", + "ijlig;": "\u0133", + "imacr;": "\u012b", + "image;": "\u2111", + "imagline;": "\u2110", + "imagpart;": "\u2111", + "imath;": "\u0131", + "imof;": "\u22b7", + "imped;": "\u01b5", + "in;": "\u2208", + "incare;": "\u2105", + "infin;": "\u221e", + "infintie;": "\u29dd", + "inodot;": "\u0131", + "int;": "\u222b", + "intcal;": "\u22ba", + "integers;": "\u2124", + "intercal;": "\u22ba", + "intlarhk;": "\u2a17", + "intprod;": "\u2a3c", + "iocy;": "\u0451", + "iogon;": "\u012f", + "iopf;": "\U0001d55a", + "iota;": "\u03b9", + "iprod;": "\u2a3c", + "iquest": "\xbf", + "iquest;": "\xbf", + "iscr;": "\U0001d4be", + "isin;": "\u2208", + "isinE;": "\u22f9", + "isindot;": "\u22f5", + "isins;": "\u22f4", + "isinsv;": "\u22f3", + "isinv;": "\u2208", + "it;": "\u2062", + "itilde;": "\u0129", + "iukcy;": "\u0456", + "iuml": "\xef", + "iuml;": "\xef", + "jcirc;": "\u0135", + "jcy;": "\u0439", + "jfr;": "\U0001d527", + "jmath;": "\u0237", + "jopf;": "\U0001d55b", 
+ "jscr;": "\U0001d4bf", + "jsercy;": "\u0458", + "jukcy;": "\u0454", + "kappa;": "\u03ba", + "kappav;": "\u03f0", + "kcedil;": "\u0137", + "kcy;": "\u043a", + "kfr;": "\U0001d528", + "kgreen;": "\u0138", + "khcy;": "\u0445", + "kjcy;": "\u045c", + "kopf;": "\U0001d55c", + "kscr;": "\U0001d4c0", + "lAarr;": "\u21da", + "lArr;": "\u21d0", + "lAtail;": "\u291b", + "lBarr;": "\u290e", + "lE;": "\u2266", + "lEg;": "\u2a8b", + "lHar;": "\u2962", + "lacute;": "\u013a", + "laemptyv;": "\u29b4", + "lagran;": "\u2112", + "lambda;": "\u03bb", + "lang;": "\u27e8", + "langd;": "\u2991", + "langle;": "\u27e8", + "lap;": "\u2a85", + "laquo": "\xab", + "laquo;": "\xab", + "larr;": "\u2190", + "larrb;": "\u21e4", + "larrbfs;": "\u291f", + "larrfs;": "\u291d", + "larrhk;": "\u21a9", + "larrlp;": "\u21ab", + "larrpl;": "\u2939", + "larrsim;": "\u2973", + "larrtl;": "\u21a2", + "lat;": "\u2aab", + "latail;": "\u2919", + "late;": "\u2aad", + "lates;": "\u2aad\ufe00", + "lbarr;": "\u290c", + "lbbrk;": "\u2772", + "lbrace;": "{", + "lbrack;": "[", + "lbrke;": "\u298b", + "lbrksld;": "\u298f", + "lbrkslu;": "\u298d", + "lcaron;": "\u013e", + "lcedil;": "\u013c", + "lceil;": "\u2308", + "lcub;": "{", + "lcy;": "\u043b", + "ldca;": "\u2936", + "ldquo;": "\u201c", + "ldquor;": "\u201e", + "ldrdhar;": "\u2967", + "ldrushar;": "\u294b", + "ldsh;": "\u21b2", + "le;": "\u2264", + "leftarrow;": "\u2190", + "leftarrowtail;": "\u21a2", + "leftharpoondown;": "\u21bd", + "leftharpoonup;": "\u21bc", + "leftleftarrows;": "\u21c7", + "leftrightarrow;": "\u2194", + "leftrightarrows;": "\u21c6", + "leftrightharpoons;": "\u21cb", + "leftrightsquigarrow;": "\u21ad", + "leftthreetimes;": "\u22cb", + "leg;": "\u22da", + "leq;": "\u2264", + "leqq;": "\u2266", + "leqslant;": "\u2a7d", + "les;": "\u2a7d", + "lescc;": "\u2aa8", + "lesdot;": "\u2a7f", + "lesdoto;": "\u2a81", + "lesdotor;": "\u2a83", + "lesg;": "\u22da\ufe00", + "lesges;": "\u2a93", + "lessapprox;": "\u2a85", + "lessdot;": "\u22d6", + "lesseqgtr;": "\u22da", + "lesseqqgtr;": "\u2a8b", + "lessgtr;": "\u2276", + "lesssim;": "\u2272", + "lfisht;": "\u297c", + "lfloor;": "\u230a", + "lfr;": "\U0001d529", + "lg;": "\u2276", + "lgE;": "\u2a91", + "lhard;": "\u21bd", + "lharu;": "\u21bc", + "lharul;": "\u296a", + "lhblk;": "\u2584", + "ljcy;": "\u0459", + "ll;": "\u226a", + "llarr;": "\u21c7", + "llcorner;": "\u231e", + "llhard;": "\u296b", + "lltri;": "\u25fa", + "lmidot;": "\u0140", + "lmoust;": "\u23b0", + "lmoustache;": "\u23b0", + "lnE;": "\u2268", + "lnap;": "\u2a89", + "lnapprox;": "\u2a89", + "lne;": "\u2a87", + "lneq;": "\u2a87", + "lneqq;": "\u2268", + "lnsim;": "\u22e6", + "loang;": "\u27ec", + "loarr;": "\u21fd", + "lobrk;": "\u27e6", + "longleftarrow;": "\u27f5", + "longleftrightarrow;": "\u27f7", + "longmapsto;": "\u27fc", + "longrightarrow;": "\u27f6", + "looparrowleft;": "\u21ab", + "looparrowright;": "\u21ac", + "lopar;": "\u2985", + "lopf;": "\U0001d55d", + "loplus;": "\u2a2d", + "lotimes;": "\u2a34", + "lowast;": "\u2217", + "lowbar;": "_", + "loz;": "\u25ca", + "lozenge;": "\u25ca", + "lozf;": "\u29eb", + "lpar;": "(", + "lparlt;": "\u2993", + "lrarr;": "\u21c6", + "lrcorner;": "\u231f", + "lrhar;": "\u21cb", + "lrhard;": "\u296d", + "lrm;": "\u200e", + "lrtri;": "\u22bf", + "lsaquo;": "\u2039", + "lscr;": "\U0001d4c1", + "lsh;": "\u21b0", + "lsim;": "\u2272", + "lsime;": "\u2a8d", + "lsimg;": "\u2a8f", + "lsqb;": "[", + "lsquo;": "\u2018", + "lsquor;": "\u201a", + "lstrok;": "\u0142", + "lt": "<", + "lt;": "<", + "ltcc;": "\u2aa6", + "ltcir;": "\u2a79", + 
"ltdot;": "\u22d6", + "lthree;": "\u22cb", + "ltimes;": "\u22c9", + "ltlarr;": "\u2976", + "ltquest;": "\u2a7b", + "ltrPar;": "\u2996", + "ltri;": "\u25c3", + "ltrie;": "\u22b4", + "ltrif;": "\u25c2", + "lurdshar;": "\u294a", + "luruhar;": "\u2966", + "lvertneqq;": "\u2268\ufe00", + "lvnE;": "\u2268\ufe00", + "mDDot;": "\u223a", + "macr": "\xaf", + "macr;": "\xaf", + "male;": "\u2642", + "malt;": "\u2720", + "maltese;": "\u2720", + "map;": "\u21a6", + "mapsto;": "\u21a6", + "mapstodown;": "\u21a7", + "mapstoleft;": "\u21a4", + "mapstoup;": "\u21a5", + "marker;": "\u25ae", + "mcomma;": "\u2a29", + "mcy;": "\u043c", + "mdash;": "\u2014", + "measuredangle;": "\u2221", + "mfr;": "\U0001d52a", + "mho;": "\u2127", + "micro": "\xb5", + "micro;": "\xb5", + "mid;": "\u2223", + "midast;": "*", + "midcir;": "\u2af0", + "middot": "\xb7", + "middot;": "\xb7", + "minus;": "\u2212", + "minusb;": "\u229f", + "minusd;": "\u2238", + "minusdu;": "\u2a2a", + "mlcp;": "\u2adb", + "mldr;": "\u2026", + "mnplus;": "\u2213", + "models;": "\u22a7", + "mopf;": "\U0001d55e", + "mp;": "\u2213", + "mscr;": "\U0001d4c2", + "mstpos;": "\u223e", + "mu;": "\u03bc", + "multimap;": "\u22b8", + "mumap;": "\u22b8", + "nGg;": "\u22d9\u0338", + "nGt;": "\u226b\u20d2", + "nGtv;": "\u226b\u0338", + "nLeftarrow;": "\u21cd", + "nLeftrightarrow;": "\u21ce", + "nLl;": "\u22d8\u0338", + "nLt;": "\u226a\u20d2", + "nLtv;": "\u226a\u0338", + "nRightarrow;": "\u21cf", + "nVDash;": "\u22af", + "nVdash;": "\u22ae", + "nabla;": "\u2207", + "nacute;": "\u0144", + "nang;": "\u2220\u20d2", + "nap;": "\u2249", + "napE;": "\u2a70\u0338", + "napid;": "\u224b\u0338", + "napos;": "\u0149", + "napprox;": "\u2249", + "natur;": "\u266e", + "natural;": "\u266e", + "naturals;": "\u2115", + "nbsp": "\xa0", + "nbsp;": "\xa0", + "nbump;": "\u224e\u0338", + "nbumpe;": "\u224f\u0338", + "ncap;": "\u2a43", + "ncaron;": "\u0148", + "ncedil;": "\u0146", + "ncong;": "\u2247", + "ncongdot;": "\u2a6d\u0338", + "ncup;": "\u2a42", + "ncy;": "\u043d", + "ndash;": "\u2013", + "ne;": "\u2260", + "neArr;": "\u21d7", + "nearhk;": "\u2924", + "nearr;": "\u2197", + "nearrow;": "\u2197", + "nedot;": "\u2250\u0338", + "nequiv;": "\u2262", + "nesear;": "\u2928", + "nesim;": "\u2242\u0338", + "nexist;": "\u2204", + "nexists;": "\u2204", + "nfr;": "\U0001d52b", + "ngE;": "\u2267\u0338", + "nge;": "\u2271", + "ngeq;": "\u2271", + "ngeqq;": "\u2267\u0338", + "ngeqslant;": "\u2a7e\u0338", + "nges;": "\u2a7e\u0338", + "ngsim;": "\u2275", + "ngt;": "\u226f", + "ngtr;": "\u226f", + "nhArr;": "\u21ce", + "nharr;": "\u21ae", + "nhpar;": "\u2af2", + "ni;": "\u220b", + "nis;": "\u22fc", + "nisd;": "\u22fa", + "niv;": "\u220b", + "njcy;": "\u045a", + "nlArr;": "\u21cd", + "nlE;": "\u2266\u0338", + "nlarr;": "\u219a", + "nldr;": "\u2025", + "nle;": "\u2270", + "nleftarrow;": "\u219a", + "nleftrightarrow;": "\u21ae", + "nleq;": "\u2270", + "nleqq;": "\u2266\u0338", + "nleqslant;": "\u2a7d\u0338", + "nles;": "\u2a7d\u0338", + "nless;": "\u226e", + "nlsim;": "\u2274", + "nlt;": "\u226e", + "nltri;": "\u22ea", + "nltrie;": "\u22ec", + "nmid;": "\u2224", + "nopf;": "\U0001d55f", + "not": "\xac", + "not;": "\xac", + "notin;": "\u2209", + "notinE;": "\u22f9\u0338", + "notindot;": "\u22f5\u0338", + "notinva;": "\u2209", + "notinvb;": "\u22f7", + "notinvc;": "\u22f6", + "notni;": "\u220c", + "notniva;": "\u220c", + "notnivb;": "\u22fe", + "notnivc;": "\u22fd", + "npar;": "\u2226", + "nparallel;": "\u2226", + "nparsl;": "\u2afd\u20e5", + "npart;": "\u2202\u0338", + "npolint;": "\u2a14", + "npr;": 
"\u2280", + "nprcue;": "\u22e0", + "npre;": "\u2aaf\u0338", + "nprec;": "\u2280", + "npreceq;": "\u2aaf\u0338", + "nrArr;": "\u21cf", + "nrarr;": "\u219b", + "nrarrc;": "\u2933\u0338", + "nrarrw;": "\u219d\u0338", + "nrightarrow;": "\u219b", + "nrtri;": "\u22eb", + "nrtrie;": "\u22ed", + "nsc;": "\u2281", + "nsccue;": "\u22e1", + "nsce;": "\u2ab0\u0338", + "nscr;": "\U0001d4c3", + "nshortmid;": "\u2224", + "nshortparallel;": "\u2226", + "nsim;": "\u2241", + "nsime;": "\u2244", + "nsimeq;": "\u2244", + "nsmid;": "\u2224", + "nspar;": "\u2226", + "nsqsube;": "\u22e2", + "nsqsupe;": "\u22e3", + "nsub;": "\u2284", + "nsubE;": "\u2ac5\u0338", + "nsube;": "\u2288", + "nsubset;": "\u2282\u20d2", + "nsubseteq;": "\u2288", + "nsubseteqq;": "\u2ac5\u0338", + "nsucc;": "\u2281", + "nsucceq;": "\u2ab0\u0338", + "nsup;": "\u2285", + "nsupE;": "\u2ac6\u0338", + "nsupe;": "\u2289", + "nsupset;": "\u2283\u20d2", + "nsupseteq;": "\u2289", + "nsupseteqq;": "\u2ac6\u0338", + "ntgl;": "\u2279", + "ntilde": "\xf1", + "ntilde;": "\xf1", + "ntlg;": "\u2278", + "ntriangleleft;": "\u22ea", + "ntrianglelefteq;": "\u22ec", + "ntriangleright;": "\u22eb", + "ntrianglerighteq;": "\u22ed", + "nu;": "\u03bd", + "num;": "#", + "numero;": "\u2116", + "numsp;": "\u2007", + "nvDash;": "\u22ad", + "nvHarr;": "\u2904", + "nvap;": "\u224d\u20d2", + "nvdash;": "\u22ac", + "nvge;": "\u2265\u20d2", + "nvgt;": ">\u20d2", + "nvinfin;": "\u29de", + "nvlArr;": "\u2902", + "nvle;": "\u2264\u20d2", + "nvlt;": "<\u20d2", + "nvltrie;": "\u22b4\u20d2", + "nvrArr;": "\u2903", + "nvrtrie;": "\u22b5\u20d2", + "nvsim;": "\u223c\u20d2", + "nwArr;": "\u21d6", + "nwarhk;": "\u2923", + "nwarr;": "\u2196", + "nwarrow;": "\u2196", + "nwnear;": "\u2927", + "oS;": "\u24c8", + "oacute": "\xf3", + "oacute;": "\xf3", + "oast;": "\u229b", + "ocir;": "\u229a", + "ocirc": "\xf4", + "ocirc;": "\xf4", + "ocy;": "\u043e", + "odash;": "\u229d", + "odblac;": "\u0151", + "odiv;": "\u2a38", + "odot;": "\u2299", + "odsold;": "\u29bc", + "oelig;": "\u0153", + "ofcir;": "\u29bf", + "ofr;": "\U0001d52c", + "ogon;": "\u02db", + "ograve": "\xf2", + "ograve;": "\xf2", + "ogt;": "\u29c1", + "ohbar;": "\u29b5", + "ohm;": "\u03a9", + "oint;": "\u222e", + "olarr;": "\u21ba", + "olcir;": "\u29be", + "olcross;": "\u29bb", + "oline;": "\u203e", + "olt;": "\u29c0", + "omacr;": "\u014d", + "omega;": "\u03c9", + "omicron;": "\u03bf", + "omid;": "\u29b6", + "ominus;": "\u2296", + "oopf;": "\U0001d560", + "opar;": "\u29b7", + "operp;": "\u29b9", + "oplus;": "\u2295", + "or;": "\u2228", + "orarr;": "\u21bb", + "ord;": "\u2a5d", + "order;": "\u2134", + "orderof;": "\u2134", + "ordf": "\xaa", + "ordf;": "\xaa", + "ordm": "\xba", + "ordm;": "\xba", + "origof;": "\u22b6", + "oror;": "\u2a56", + "orslope;": "\u2a57", + "orv;": "\u2a5b", + "oscr;": "\u2134", + "oslash": "\xf8", + "oslash;": "\xf8", + "osol;": "\u2298", + "otilde": "\xf5", + "otilde;": "\xf5", + "otimes;": "\u2297", + "otimesas;": "\u2a36", + "ouml": "\xf6", + "ouml;": "\xf6", + "ovbar;": "\u233d", + "par;": "\u2225", + "para": "\xb6", + "para;": "\xb6", + "parallel;": "\u2225", + "parsim;": "\u2af3", + "parsl;": "\u2afd", + "part;": "\u2202", + "pcy;": "\u043f", + "percnt;": "%", + "period;": ".", + "permil;": "\u2030", + "perp;": "\u22a5", + "pertenk;": "\u2031", + "pfr;": "\U0001d52d", + "phi;": "\u03c6", + "phiv;": "\u03d5", + "phmmat;": "\u2133", + "phone;": "\u260e", + "pi;": "\u03c0", + "pitchfork;": "\u22d4", + "piv;": "\u03d6", + "planck;": "\u210f", + "planckh;": "\u210e", + "plankv;": "\u210f", + "plus;": "+", + 
"plusacir;": "\u2a23", + "plusb;": "\u229e", + "pluscir;": "\u2a22", + "plusdo;": "\u2214", + "plusdu;": "\u2a25", + "pluse;": "\u2a72", + "plusmn": "\xb1", + "plusmn;": "\xb1", + "plussim;": "\u2a26", + "plustwo;": "\u2a27", + "pm;": "\xb1", + "pointint;": "\u2a15", + "popf;": "\U0001d561", + "pound": "\xa3", + "pound;": "\xa3", + "pr;": "\u227a", + "prE;": "\u2ab3", + "prap;": "\u2ab7", + "prcue;": "\u227c", + "pre;": "\u2aaf", + "prec;": "\u227a", + "precapprox;": "\u2ab7", + "preccurlyeq;": "\u227c", + "preceq;": "\u2aaf", + "precnapprox;": "\u2ab9", + "precneqq;": "\u2ab5", + "precnsim;": "\u22e8", + "precsim;": "\u227e", + "prime;": "\u2032", + "primes;": "\u2119", + "prnE;": "\u2ab5", + "prnap;": "\u2ab9", + "prnsim;": "\u22e8", + "prod;": "\u220f", + "profalar;": "\u232e", + "profline;": "\u2312", + "profsurf;": "\u2313", + "prop;": "\u221d", + "propto;": "\u221d", + "prsim;": "\u227e", + "prurel;": "\u22b0", + "pscr;": "\U0001d4c5", + "psi;": "\u03c8", + "puncsp;": "\u2008", + "qfr;": "\U0001d52e", + "qint;": "\u2a0c", + "qopf;": "\U0001d562", + "qprime;": "\u2057", + "qscr;": "\U0001d4c6", + "quaternions;": "\u210d", + "quatint;": "\u2a16", + "quest;": "?", + "questeq;": "\u225f", + "quot": "\"", + "quot;": "\"", + "rAarr;": "\u21db", + "rArr;": "\u21d2", + "rAtail;": "\u291c", + "rBarr;": "\u290f", + "rHar;": "\u2964", + "race;": "\u223d\u0331", + "racute;": "\u0155", + "radic;": "\u221a", + "raemptyv;": "\u29b3", + "rang;": "\u27e9", + "rangd;": "\u2992", + "range;": "\u29a5", + "rangle;": "\u27e9", + "raquo": "\xbb", + "raquo;": "\xbb", + "rarr;": "\u2192", + "rarrap;": "\u2975", + "rarrb;": "\u21e5", + "rarrbfs;": "\u2920", + "rarrc;": "\u2933", + "rarrfs;": "\u291e", + "rarrhk;": "\u21aa", + "rarrlp;": "\u21ac", + "rarrpl;": "\u2945", + "rarrsim;": "\u2974", + "rarrtl;": "\u21a3", + "rarrw;": "\u219d", + "ratail;": "\u291a", + "ratio;": "\u2236", + "rationals;": "\u211a", + "rbarr;": "\u290d", + "rbbrk;": "\u2773", + "rbrace;": "}", + "rbrack;": "]", + "rbrke;": "\u298c", + "rbrksld;": "\u298e", + "rbrkslu;": "\u2990", + "rcaron;": "\u0159", + "rcedil;": "\u0157", + "rceil;": "\u2309", + "rcub;": "}", + "rcy;": "\u0440", + "rdca;": "\u2937", + "rdldhar;": "\u2969", + "rdquo;": "\u201d", + "rdquor;": "\u201d", + "rdsh;": "\u21b3", + "real;": "\u211c", + "realine;": "\u211b", + "realpart;": "\u211c", + "reals;": "\u211d", + "rect;": "\u25ad", + "reg": "\xae", + "reg;": "\xae", + "rfisht;": "\u297d", + "rfloor;": "\u230b", + "rfr;": "\U0001d52f", + "rhard;": "\u21c1", + "rharu;": "\u21c0", + "rharul;": "\u296c", + "rho;": "\u03c1", + "rhov;": "\u03f1", + "rightarrow;": "\u2192", + "rightarrowtail;": "\u21a3", + "rightharpoondown;": "\u21c1", + "rightharpoonup;": "\u21c0", + "rightleftarrows;": "\u21c4", + "rightleftharpoons;": "\u21cc", + "rightrightarrows;": "\u21c9", + "rightsquigarrow;": "\u219d", + "rightthreetimes;": "\u22cc", + "ring;": "\u02da", + "risingdotseq;": "\u2253", + "rlarr;": "\u21c4", + "rlhar;": "\u21cc", + "rlm;": "\u200f", + "rmoust;": "\u23b1", + "rmoustache;": "\u23b1", + "rnmid;": "\u2aee", + "roang;": "\u27ed", + "roarr;": "\u21fe", + "robrk;": "\u27e7", + "ropar;": "\u2986", + "ropf;": "\U0001d563", + "roplus;": "\u2a2e", + "rotimes;": "\u2a35", + "rpar;": ")", + "rpargt;": "\u2994", + "rppolint;": "\u2a12", + "rrarr;": "\u21c9", + "rsaquo;": "\u203a", + "rscr;": "\U0001d4c7", + "rsh;": "\u21b1", + "rsqb;": "]", + "rsquo;": "\u2019", + "rsquor;": "\u2019", + "rthree;": "\u22cc", + "rtimes;": "\u22ca", + "rtri;": "\u25b9", + "rtrie;": "\u22b5", + 
"rtrif;": "\u25b8", + "rtriltri;": "\u29ce", + "ruluhar;": "\u2968", + "rx;": "\u211e", + "sacute;": "\u015b", + "sbquo;": "\u201a", + "sc;": "\u227b", + "scE;": "\u2ab4", + "scap;": "\u2ab8", + "scaron;": "\u0161", + "sccue;": "\u227d", + "sce;": "\u2ab0", + "scedil;": "\u015f", + "scirc;": "\u015d", + "scnE;": "\u2ab6", + "scnap;": "\u2aba", + "scnsim;": "\u22e9", + "scpolint;": "\u2a13", + "scsim;": "\u227f", + "scy;": "\u0441", + "sdot;": "\u22c5", + "sdotb;": "\u22a1", + "sdote;": "\u2a66", + "seArr;": "\u21d8", + "searhk;": "\u2925", + "searr;": "\u2198", + "searrow;": "\u2198", + "sect": "\xa7", + "sect;": "\xa7", + "semi;": ";", + "seswar;": "\u2929", + "setminus;": "\u2216", + "setmn;": "\u2216", + "sext;": "\u2736", + "sfr;": "\U0001d530", + "sfrown;": "\u2322", + "sharp;": "\u266f", + "shchcy;": "\u0449", + "shcy;": "\u0448", + "shortmid;": "\u2223", + "shortparallel;": "\u2225", + "shy": "\xad", + "shy;": "\xad", + "sigma;": "\u03c3", + "sigmaf;": "\u03c2", + "sigmav;": "\u03c2", + "sim;": "\u223c", + "simdot;": "\u2a6a", + "sime;": "\u2243", + "simeq;": "\u2243", + "simg;": "\u2a9e", + "simgE;": "\u2aa0", + "siml;": "\u2a9d", + "simlE;": "\u2a9f", + "simne;": "\u2246", + "simplus;": "\u2a24", + "simrarr;": "\u2972", + "slarr;": "\u2190", + "smallsetminus;": "\u2216", + "smashp;": "\u2a33", + "smeparsl;": "\u29e4", + "smid;": "\u2223", + "smile;": "\u2323", + "smt;": "\u2aaa", + "smte;": "\u2aac", + "smtes;": "\u2aac\ufe00", + "softcy;": "\u044c", + "sol;": "/", + "solb;": "\u29c4", + "solbar;": "\u233f", + "sopf;": "\U0001d564", + "spades;": "\u2660", + "spadesuit;": "\u2660", + "spar;": "\u2225", + "sqcap;": "\u2293", + "sqcaps;": "\u2293\ufe00", + "sqcup;": "\u2294", + "sqcups;": "\u2294\ufe00", + "sqsub;": "\u228f", + "sqsube;": "\u2291", + "sqsubset;": "\u228f", + "sqsubseteq;": "\u2291", + "sqsup;": "\u2290", + "sqsupe;": "\u2292", + "sqsupset;": "\u2290", + "sqsupseteq;": "\u2292", + "squ;": "\u25a1", + "square;": "\u25a1", + "squarf;": "\u25aa", + "squf;": "\u25aa", + "srarr;": "\u2192", + "sscr;": "\U0001d4c8", + "ssetmn;": "\u2216", + "ssmile;": "\u2323", + "sstarf;": "\u22c6", + "star;": "\u2606", + "starf;": "\u2605", + "straightepsilon;": "\u03f5", + "straightphi;": "\u03d5", + "strns;": "\xaf", + "sub;": "\u2282", + "subE;": "\u2ac5", + "subdot;": "\u2abd", + "sube;": "\u2286", + "subedot;": "\u2ac3", + "submult;": "\u2ac1", + "subnE;": "\u2acb", + "subne;": "\u228a", + "subplus;": "\u2abf", + "subrarr;": "\u2979", + "subset;": "\u2282", + "subseteq;": "\u2286", + "subseteqq;": "\u2ac5", + "subsetneq;": "\u228a", + "subsetneqq;": "\u2acb", + "subsim;": "\u2ac7", + "subsub;": "\u2ad5", + "subsup;": "\u2ad3", + "succ;": "\u227b", + "succapprox;": "\u2ab8", + "succcurlyeq;": "\u227d", + "succeq;": "\u2ab0", + "succnapprox;": "\u2aba", + "succneqq;": "\u2ab6", + "succnsim;": "\u22e9", + "succsim;": "\u227f", + "sum;": "\u2211", + "sung;": "\u266a", + "sup1": "\xb9", + "sup1;": "\xb9", + "sup2": "\xb2", + "sup2;": "\xb2", + "sup3": "\xb3", + "sup3;": "\xb3", + "sup;": "\u2283", + "supE;": "\u2ac6", + "supdot;": "\u2abe", + "supdsub;": "\u2ad8", + "supe;": "\u2287", + "supedot;": "\u2ac4", + "suphsol;": "\u27c9", + "suphsub;": "\u2ad7", + "suplarr;": "\u297b", + "supmult;": "\u2ac2", + "supnE;": "\u2acc", + "supne;": "\u228b", + "supplus;": "\u2ac0", + "supset;": "\u2283", + "supseteq;": "\u2287", + "supseteqq;": "\u2ac6", + "supsetneq;": "\u228b", + "supsetneqq;": "\u2acc", + "supsim;": "\u2ac8", + "supsub;": "\u2ad4", + "supsup;": "\u2ad6", + "swArr;": "\u21d9", + 
"swarhk;": "\u2926", + "swarr;": "\u2199", + "swarrow;": "\u2199", + "swnwar;": "\u292a", + "szlig": "\xdf", + "szlig;": "\xdf", + "target;": "\u2316", + "tau;": "\u03c4", + "tbrk;": "\u23b4", + "tcaron;": "\u0165", + "tcedil;": "\u0163", + "tcy;": "\u0442", + "tdot;": "\u20db", + "telrec;": "\u2315", + "tfr;": "\U0001d531", + "there4;": "\u2234", + "therefore;": "\u2234", + "theta;": "\u03b8", + "thetasym;": "\u03d1", + "thetav;": "\u03d1", + "thickapprox;": "\u2248", + "thicksim;": "\u223c", + "thinsp;": "\u2009", + "thkap;": "\u2248", + "thksim;": "\u223c", + "thorn": "\xfe", + "thorn;": "\xfe", + "tilde;": "\u02dc", + "times": "\xd7", + "times;": "\xd7", + "timesb;": "\u22a0", + "timesbar;": "\u2a31", + "timesd;": "\u2a30", + "tint;": "\u222d", + "toea;": "\u2928", + "top;": "\u22a4", + "topbot;": "\u2336", + "topcir;": "\u2af1", + "topf;": "\U0001d565", + "topfork;": "\u2ada", + "tosa;": "\u2929", + "tprime;": "\u2034", + "trade;": "\u2122", + "triangle;": "\u25b5", + "triangledown;": "\u25bf", + "triangleleft;": "\u25c3", + "trianglelefteq;": "\u22b4", + "triangleq;": "\u225c", + "triangleright;": "\u25b9", + "trianglerighteq;": "\u22b5", + "tridot;": "\u25ec", + "trie;": "\u225c", + "triminus;": "\u2a3a", + "triplus;": "\u2a39", + "trisb;": "\u29cd", + "tritime;": "\u2a3b", + "trpezium;": "\u23e2", + "tscr;": "\U0001d4c9", + "tscy;": "\u0446", + "tshcy;": "\u045b", + "tstrok;": "\u0167", + "twixt;": "\u226c", + "twoheadleftarrow;": "\u219e", + "twoheadrightarrow;": "\u21a0", + "uArr;": "\u21d1", + "uHar;": "\u2963", + "uacute": "\xfa", + "uacute;": "\xfa", + "uarr;": "\u2191", + "ubrcy;": "\u045e", + "ubreve;": "\u016d", + "ucirc": "\xfb", + "ucirc;": "\xfb", + "ucy;": "\u0443", + "udarr;": "\u21c5", + "udblac;": "\u0171", + "udhar;": "\u296e", + "ufisht;": "\u297e", + "ufr;": "\U0001d532", + "ugrave": "\xf9", + "ugrave;": "\xf9", + "uharl;": "\u21bf", + "uharr;": "\u21be", + "uhblk;": "\u2580", + "ulcorn;": "\u231c", + "ulcorner;": "\u231c", + "ulcrop;": "\u230f", + "ultri;": "\u25f8", + "umacr;": "\u016b", + "uml": "\xa8", + "uml;": "\xa8", + "uogon;": "\u0173", + "uopf;": "\U0001d566", + "uparrow;": "\u2191", + "updownarrow;": "\u2195", + "upharpoonleft;": "\u21bf", + "upharpoonright;": "\u21be", + "uplus;": "\u228e", + "upsi;": "\u03c5", + "upsih;": "\u03d2", + "upsilon;": "\u03c5", + "upuparrows;": "\u21c8", + "urcorn;": "\u231d", + "urcorner;": "\u231d", + "urcrop;": "\u230e", + "uring;": "\u016f", + "urtri;": "\u25f9", + "uscr;": "\U0001d4ca", + "utdot;": "\u22f0", + "utilde;": "\u0169", + "utri;": "\u25b5", + "utrif;": "\u25b4", + "uuarr;": "\u21c8", + "uuml": "\xfc", + "uuml;": "\xfc", + "uwangle;": "\u29a7", + "vArr;": "\u21d5", + "vBar;": "\u2ae8", + "vBarv;": "\u2ae9", + "vDash;": "\u22a8", + "vangrt;": "\u299c", + "varepsilon;": "\u03f5", + "varkappa;": "\u03f0", + "varnothing;": "\u2205", + "varphi;": "\u03d5", + "varpi;": "\u03d6", + "varpropto;": "\u221d", + "varr;": "\u2195", + "varrho;": "\u03f1", + "varsigma;": "\u03c2", + "varsubsetneq;": "\u228a\ufe00", + "varsubsetneqq;": "\u2acb\ufe00", + "varsupsetneq;": "\u228b\ufe00", + "varsupsetneqq;": "\u2acc\ufe00", + "vartheta;": "\u03d1", + "vartriangleleft;": "\u22b2", + "vartriangleright;": "\u22b3", + "vcy;": "\u0432", + "vdash;": "\u22a2", + "vee;": "\u2228", + "veebar;": "\u22bb", + "veeeq;": "\u225a", + "vellip;": "\u22ee", + "verbar;": "|", + "vert;": "|", + "vfr;": "\U0001d533", + "vltri;": "\u22b2", + "vnsub;": "\u2282\u20d2", + "vnsup;": "\u2283\u20d2", + "vopf;": "\U0001d567", + "vprop;": "\u221d", + 
"vrtri;": "\u22b3", + "vscr;": "\U0001d4cb", + "vsubnE;": "\u2acb\ufe00", + "vsubne;": "\u228a\ufe00", + "vsupnE;": "\u2acc\ufe00", + "vsupne;": "\u228b\ufe00", + "vzigzag;": "\u299a", + "wcirc;": "\u0175", + "wedbar;": "\u2a5f", + "wedge;": "\u2227", + "wedgeq;": "\u2259", + "weierp;": "\u2118", + "wfr;": "\U0001d534", + "wopf;": "\U0001d568", + "wp;": "\u2118", + "wr;": "\u2240", + "wreath;": "\u2240", + "wscr;": "\U0001d4cc", + "xcap;": "\u22c2", + "xcirc;": "\u25ef", + "xcup;": "\u22c3", + "xdtri;": "\u25bd", + "xfr;": "\U0001d535", + "xhArr;": "\u27fa", + "xharr;": "\u27f7", + "xi;": "\u03be", + "xlArr;": "\u27f8", + "xlarr;": "\u27f5", + "xmap;": "\u27fc", + "xnis;": "\u22fb", + "xodot;": "\u2a00", + "xopf;": "\U0001d569", + "xoplus;": "\u2a01", + "xotime;": "\u2a02", + "xrArr;": "\u27f9", + "xrarr;": "\u27f6", + "xscr;": "\U0001d4cd", + "xsqcup;": "\u2a06", + "xuplus;": "\u2a04", + "xutri;": "\u25b3", + "xvee;": "\u22c1", + "xwedge;": "\u22c0", + "yacute": "\xfd", + "yacute;": "\xfd", + "yacy;": "\u044f", + "ycirc;": "\u0177", + "ycy;": "\u044b", + "yen": "\xa5", + "yen;": "\xa5", + "yfr;": "\U0001d536", + "yicy;": "\u0457", + "yopf;": "\U0001d56a", + "yscr;": "\U0001d4ce", + "yucy;": "\u044e", + "yuml": "\xff", + "yuml;": "\xff", + "zacute;": "\u017a", + "zcaron;": "\u017e", + "zcy;": "\u0437", + "zdot;": "\u017c", + "zeetrf;": "\u2128", + "zeta;": "\u03b6", + "zfr;": "\U0001d537", + "zhcy;": "\u0436", + "zigrarr;": "\u21dd", + "zopf;": "\U0001d56b", + "zscr;": "\U0001d4cf", + "zwj;": "\u200d", + "zwnj;": "\u200c", +} + +replacementCharacters = { + 0x0: "\uFFFD", + 0x0d: "\u000D", + 0x80: "\u20AC", + 0x81: "\u0081", + 0x82: "\u201A", + 0x83: "\u0192", + 0x84: "\u201E", + 0x85: "\u2026", + 0x86: "\u2020", + 0x87: "\u2021", + 0x88: "\u02C6", + 0x89: "\u2030", + 0x8A: "\u0160", + 0x8B: "\u2039", + 0x8C: "\u0152", + 0x8D: "\u008D", + 0x8E: "\u017D", + 0x8F: "\u008F", + 0x90: "\u0090", + 0x91: "\u2018", + 0x92: "\u2019", + 0x93: "\u201C", + 0x94: "\u201D", + 0x95: "\u2022", + 0x96: "\u2013", + 0x97: "\u2014", + 0x98: "\u02DC", + 0x99: "\u2122", + 0x9A: "\u0161", + 0x9B: "\u203A", + 0x9C: "\u0153", + 0x9D: "\u009D", + 0x9E: "\u017E", + 0x9F: "\u0178", +} + +tokenTypes = { + "Doctype": 0, + "Characters": 1, + "SpaceCharacters": 2, + "StartTag": 3, + "EndTag": 4, + "EmptyTag": 5, + "Comment": 6, + "ParseError": 7 +} + +tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], + tokenTypes["EmptyTag"]]) + + +prefixes = {v: k for k, v in namespaces.items()} +prefixes["http://www.w3.org/1998/Math/MathML"] = "math" + + +class DataLossWarning(UserWarning): + """Raised when the current tree is unable to represent the input data""" + pass + + +class _ReparseException(Exception): + pass diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__init__.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..f12de6b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc 
b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc new file mode 100644 index 0000000..9b54a2f Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000..de9f900 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc new file mode 100644 index 0000000..6012933 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc new file mode 100644 index 0000000..63aa325 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc new file mode 100644 index 0000000..84c713b Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc new file mode 100644 index 0000000..4d227f5 Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc new file mode 100644 index 0000000..b4a51fd Binary files /dev/null and b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc differ diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py new file mode 100644 index 0000000..5ba926e --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py @@ -0,0 +1,29 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import base + +from collections import OrderedDict + + +def _attr_key(attr): + """Return an appropriate key for an attribute for sorting + + Attributes have a namespace that can be either ``None`` or a string. We + can't compare the two because they're different types, so we convert + ``None`` to an empty string first. 
+ + """ + return (attr[0][0] or ''), attr[0][1] + + +class Filter(base.Filter): + """Alphabetizes attributes for elements""" + def __iter__(self): + for token in base.Filter.__iter__(self): + if token["type"] in ("StartTag", "EmptyTag"): + attrs = OrderedDict() + for name, value in sorted(token["data"].items(), + key=_attr_key): + attrs[name] = value + token["data"] = attrs + yield token diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/base.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/base.py new file mode 100644 index 0000000..c7dbaed --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/base.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import, division, unicode_literals + + +class Filter(object): + def __init__(self, source): + self.source = source + + def __iter__(self): + return iter(self.source) + + def __getattr__(self, name): + return getattr(self.source, name) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py new file mode 100644 index 0000000..aefb5c8 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py @@ -0,0 +1,73 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import base + + +class Filter(base.Filter): + """Injects ```` tag into head of document""" + def __init__(self, source, encoding): + """Creates a Filter + + :arg source: the source token stream + + :arg encoding: the encoding to set + + """ + base.Filter.__init__(self, source) + self.encoding = encoding + + def __iter__(self): + state = "pre_head" + meta_found = (self.encoding is None) + pending = [] + + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag": + if token["name"].lower() == "head": + state = "in_head" + + elif type == "EmptyTag": + if token["name"].lower() == "meta": + # replace charset with actual encoding + has_http_equiv_content_type = False + for (namespace, name), value in token["data"].items(): + if namespace is not None: + continue + elif name.lower() == 'charset': + token["data"][(namespace, name)] = self.encoding + meta_found = True + break + elif name == 'http-equiv' and value.lower() == 'content-type': + has_http_equiv_content_type = True + else: + if has_http_equiv_content_type and (None, "content") in token["data"]: + token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding + meta_found = True + + elif token["name"].lower() == "head" and not meta_found: + # insert meta into empty head + yield {"type": "StartTag", "name": "head", + "data": token["data"]} + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + yield {"type": "EndTag", "name": "head"} + meta_found = True + continue + + elif type == "EndTag": + if token["name"].lower() == "head" and pending: + # insert meta into head (if necessary) and flush pending queue + yield pending.pop(0) + if not meta_found: + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + while pending: + yield pending.pop(0) + meta_found = True + state = "post_head" + + if state == "in_head": + pending.append(token) + else: + yield token diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/lint.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/lint.py new file mode 100644 index 0000000..fcc07ee --- 
/dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/lint.py @@ -0,0 +1,93 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import text_type + +from . import base +from ..constants import namespaces, voidElements + +from ..constants import spaceCharacters +spaceCharacters = "".join(spaceCharacters) + + +class Filter(base.Filter): + """Lints the token stream for errors + + If it finds any errors, it'll raise an ``AssertionError``. + + """ + def __init__(self, source, require_matching_tags=True): + """Creates a Filter + + :arg source: the source token stream + + :arg require_matching_tags: whether or not to require matching tags + + """ + super(Filter, self).__init__(source) + self.require_matching_tags = require_matching_tags + + def __iter__(self): + open_elements = [] + for token in base.Filter.__iter__(self): + type = token["type"] + if type in ("StartTag", "EmptyTag"): + namespace = token["namespace"] + name = token["name"] + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + assert isinstance(token["data"], dict) + if (not namespace or namespace == namespaces["html"]) and name in voidElements: + assert type == "EmptyTag" + else: + assert type == "StartTag" + if type == "StartTag" and self.require_matching_tags: + open_elements.append((namespace, name)) + for (namespace, name), value in token["data"].items(): + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + assert isinstance(value, text_type) + + elif type == "EndTag": + namespace = token["namespace"] + name = token["name"] + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + if (not namespace or namespace == namespaces["html"]) and name in voidElements: + assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name} + elif self.require_matching_tags: + start = open_elements.pop() + assert start == (namespace, name) + + elif type == "Comment": + data = token["data"] + assert isinstance(data, text_type) + + elif type in ("Characters", "SpaceCharacters"): + data = token["data"] + assert isinstance(data, text_type) + assert data != "" + if type == "SpaceCharacters": + assert data.strip(spaceCharacters) == "" + + elif type == "Doctype": + name = token["name"] + assert name is None or isinstance(name, text_type) + assert token["publicId"] is None or isinstance(name, text_type) + assert token["systemId"] is None or isinstance(name, text_type) + + elif type == "Entity": + assert isinstance(token["name"], text_type) + + elif type == "SerializerError": + assert isinstance(token["data"], text_type) + + else: + assert False, "Unknown token type: %(type)s" % {"type": type} + + yield token diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/optionaltags.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/optionaltags.py new file mode 100644 index 0000000..4a86501 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/optionaltags.py @@ -0,0 +1,207 @@ +from __future__ import absolute_import, division, unicode_literals + +from . 
import base + + +class Filter(base.Filter): + """Removes optional tags from the token stream""" + def slider(self): + previous1 = previous2 = None + for token in self.source: + if previous1 is not None: + yield previous2, previous1, token + previous2 = previous1 + previous1 = token + if previous1 is not None: + yield previous2, previous1, None + + def __iter__(self): + for previous, token, next in self.slider(): + type = token["type"] + if type == "StartTag": + if (token["data"] or + not self.is_optional_start(token["name"], previous, next)): + yield token + elif type == "EndTag": + if not self.is_optional_end(token["name"], next): + yield token + else: + yield token + + def is_optional_start(self, tagname, previous, next): + type = next and next["type"] or None + if tagname in 'html': + # An html element's start tag may be omitted if the first thing + # inside the html element is not a space character or a comment. + return type not in ("Comment", "SpaceCharacters") + elif tagname == 'head': + # A head element's start tag may be omitted if the first thing + # inside the head element is an element. + # XXX: we also omit the start tag if the head element is empty + if type in ("StartTag", "EmptyTag"): + return True + elif type == "EndTag": + return next["name"] == "head" + elif tagname == 'body': + # A body element's start tag may be omitted if the first thing + # inside the body element is not a space character or a comment, + # except if the first thing inside the body element is a script + # or style element and the node immediately preceding the body + # element is a head element whose end tag has been omitted. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we do not look at the preceding event, so we never omit + # the body element's start tag if it's followed by a script or + # a style element. + return next["name"] not in ('script', 'style') + else: + return True + elif tagname == 'colgroup': + # A colgroup element's start tag may be omitted if the first thing + # inside the colgroup element is a col element, and if the element + # is not immediately preceded by another colgroup element whose + # end tag has been omitted. + if type in ("StartTag", "EmptyTag"): + # XXX: we do not look at the preceding event, so instead we never + # omit the colgroup element's end tag when it is immediately + # followed by another colgroup element. See is_optional_end. + return next["name"] == "col" + else: + return False + elif tagname == 'tbody': + # A tbody element's start tag may be omitted if the first thing + # inside the tbody element is a tr element, and if the element is + # not immediately preceded by a tbody, thead, or tfoot element + # whose end tag has been omitted. + if type == "StartTag": + # omit the thead and tfoot elements' end tag when they are + # immediately followed by a tbody element. See is_optional_end. + if previous and previous['type'] == 'EndTag' and \ + previous['name'] in ('tbody', 'thead', 'tfoot'): + return False + return next["name"] == 'tr' + else: + return False + return False + + def is_optional_end(self, tagname, next): + type = next and next["type"] or None + if tagname in ('html', 'head', 'body'): + # An html element's end tag may be omitted if the html element + # is not immediately followed by a space character or a comment. 
+ return type not in ("Comment", "SpaceCharacters") + elif tagname in ('li', 'optgroup', 'tr'): + # A li element's end tag may be omitted if the li element is + # immediately followed by another li element or if there is + # no more content in the parent element. + # An optgroup element's end tag may be omitted if the optgroup + # element is immediately followed by another optgroup element, + # or if there is no more content in the parent element. + # A tr element's end tag may be omitted if the tr element is + # immediately followed by another tr element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] == tagname + else: + return type == "EndTag" or type is None + elif tagname in ('dt', 'dd'): + # A dt element's end tag may be omitted if the dt element is + # immediately followed by another dt element or a dd element. + # A dd element's end tag may be omitted if the dd element is + # immediately followed by another dd element or a dt element, + # or if there is no more content in the parent element. + if type == "StartTag": + return next["name"] in ('dt', 'dd') + elif tagname == 'dd': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'p': + # A p element's end tag may be omitted if the p element is + # immediately followed by an address, article, aside, + # blockquote, datagrid, dialog, dir, div, dl, fieldset, + # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu, + # nav, ol, p, pre, section, table, or ul, element, or if + # there is no more content in the parent element. + if type in ("StartTag", "EmptyTag"): + return next["name"] in ('address', 'article', 'aside', + 'blockquote', 'datagrid', 'dialog', + 'dir', 'div', 'dl', 'fieldset', 'footer', + 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', + 'header', 'hr', 'menu', 'nav', 'ol', + 'p', 'pre', 'section', 'table', 'ul') + else: + return type == "EndTag" or type is None + elif tagname == 'option': + # An option element's end tag may be omitted if the option + # element is immediately followed by another option element, + # or if it is immediately followed by an optgroup + # element, or if there is no more content in the parent + # element. + if type == "StartTag": + return next["name"] in ('option', 'optgroup') + else: + return type == "EndTag" or type is None + elif tagname in ('rt', 'rp'): + # An rt element's end tag may be omitted if the rt element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + # An rp element's end tag may be omitted if the rp element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('rt', 'rp') + else: + return type == "EndTag" or type is None + elif tagname == 'colgroup': + # A colgroup element's end tag may be omitted if the colgroup + # element is not immediately followed by a space character or + # a comment. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we also look for an immediately following colgroup + # element. See is_optional_start. + return next["name"] != 'colgroup' + else: + return True + elif tagname in ('thead', 'tbody'): + # A thead element's end tag may be omitted if the thead element + # is immediately followed by a tbody or tfoot element. 
+ # A tbody element's end tag may be omitted if the tbody element + # is immediately followed by a tbody or tfoot element, or if + # there is no more content in the parent element. + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] in ['tbody', 'tfoot'] + elif tagname == 'tbody': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'tfoot': + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] == 'tbody' + else: + return type == "EndTag" or type is None + elif tagname in ('td', 'th'): + # A td element's end tag may be omitted if the td element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + # A th element's end tag may be omitted if the th element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('td', 'th') + else: + return type == "EndTag" or type is None + return False diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/sanitizer.py new file mode 100644 index 0000000..aa7431d --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/sanitizer.py @@ -0,0 +1,916 @@ +"""Deprecated from html5lib 1.1. + +See `here `_ for +information about its deprecation; `Bleach `_ +is recommended as a replacement. Please let us know in the aforementioned issue +if Bleach is unsuitable for your needs. + +""" +from __future__ import absolute_import, division, unicode_literals + +import re +import warnings +from xml.sax.saxutils import escape, unescape + +from pip._vendor.six.moves import urllib_parse as urlparse + +from . 
import base +from ..constants import namespaces, prefixes + +__all__ = ["Filter"] + + +_deprecation_msg = ( + "html5lib's sanitizer is deprecated; see " + + "https://github.com/html5lib/html5lib-python/issues/443 and please let " + + "us know if Bleach is unsuitable for your needs" +) + +warnings.warn(_deprecation_msg, DeprecationWarning) + +allowed_elements = frozenset(( + (namespaces['html'], 'a'), + (namespaces['html'], 'abbr'), + (namespaces['html'], 'acronym'), + (namespaces['html'], 'address'), + (namespaces['html'], 'area'), + (namespaces['html'], 'article'), + (namespaces['html'], 'aside'), + (namespaces['html'], 'audio'), + (namespaces['html'], 'b'), + (namespaces['html'], 'big'), + (namespaces['html'], 'blockquote'), + (namespaces['html'], 'br'), + (namespaces['html'], 'button'), + (namespaces['html'], 'canvas'), + (namespaces['html'], 'caption'), + (namespaces['html'], 'center'), + (namespaces['html'], 'cite'), + (namespaces['html'], 'code'), + (namespaces['html'], 'col'), + (namespaces['html'], 'colgroup'), + (namespaces['html'], 'command'), + (namespaces['html'], 'datagrid'), + (namespaces['html'], 'datalist'), + (namespaces['html'], 'dd'), + (namespaces['html'], 'del'), + (namespaces['html'], 'details'), + (namespaces['html'], 'dfn'), + (namespaces['html'], 'dialog'), + (namespaces['html'], 'dir'), + (namespaces['html'], 'div'), + (namespaces['html'], 'dl'), + (namespaces['html'], 'dt'), + (namespaces['html'], 'em'), + (namespaces['html'], 'event-source'), + (namespaces['html'], 'fieldset'), + (namespaces['html'], 'figcaption'), + (namespaces['html'], 'figure'), + (namespaces['html'], 'footer'), + (namespaces['html'], 'font'), + (namespaces['html'], 'form'), + (namespaces['html'], 'header'), + (namespaces['html'], 'h1'), + (namespaces['html'], 'h2'), + (namespaces['html'], 'h3'), + (namespaces['html'], 'h4'), + (namespaces['html'], 'h5'), + (namespaces['html'], 'h6'), + (namespaces['html'], 'hr'), + (namespaces['html'], 'i'), + (namespaces['html'], 'img'), + (namespaces['html'], 'input'), + (namespaces['html'], 'ins'), + (namespaces['html'], 'keygen'), + (namespaces['html'], 'kbd'), + (namespaces['html'], 'label'), + (namespaces['html'], 'legend'), + (namespaces['html'], 'li'), + (namespaces['html'], 'm'), + (namespaces['html'], 'map'), + (namespaces['html'], 'menu'), + (namespaces['html'], 'meter'), + (namespaces['html'], 'multicol'), + (namespaces['html'], 'nav'), + (namespaces['html'], 'nextid'), + (namespaces['html'], 'ol'), + (namespaces['html'], 'output'), + (namespaces['html'], 'optgroup'), + (namespaces['html'], 'option'), + (namespaces['html'], 'p'), + (namespaces['html'], 'pre'), + (namespaces['html'], 'progress'), + (namespaces['html'], 'q'), + (namespaces['html'], 's'), + (namespaces['html'], 'samp'), + (namespaces['html'], 'section'), + (namespaces['html'], 'select'), + (namespaces['html'], 'small'), + (namespaces['html'], 'sound'), + (namespaces['html'], 'source'), + (namespaces['html'], 'spacer'), + (namespaces['html'], 'span'), + (namespaces['html'], 'strike'), + (namespaces['html'], 'strong'), + (namespaces['html'], 'sub'), + (namespaces['html'], 'sup'), + (namespaces['html'], 'table'), + (namespaces['html'], 'tbody'), + (namespaces['html'], 'td'), + (namespaces['html'], 'textarea'), + (namespaces['html'], 'time'), + (namespaces['html'], 'tfoot'), + (namespaces['html'], 'th'), + (namespaces['html'], 'thead'), + (namespaces['html'], 'tr'), + (namespaces['html'], 'tt'), + (namespaces['html'], 'u'), + (namespaces['html'], 'ul'), + (namespaces['html'], 'var'), + 
(namespaces['html'], 'video'), + (namespaces['mathml'], 'maction'), + (namespaces['mathml'], 'math'), + (namespaces['mathml'], 'merror'), + (namespaces['mathml'], 'mfrac'), + (namespaces['mathml'], 'mi'), + (namespaces['mathml'], 'mmultiscripts'), + (namespaces['mathml'], 'mn'), + (namespaces['mathml'], 'mo'), + (namespaces['mathml'], 'mover'), + (namespaces['mathml'], 'mpadded'), + (namespaces['mathml'], 'mphantom'), + (namespaces['mathml'], 'mprescripts'), + (namespaces['mathml'], 'mroot'), + (namespaces['mathml'], 'mrow'), + (namespaces['mathml'], 'mspace'), + (namespaces['mathml'], 'msqrt'), + (namespaces['mathml'], 'mstyle'), + (namespaces['mathml'], 'msub'), + (namespaces['mathml'], 'msubsup'), + (namespaces['mathml'], 'msup'), + (namespaces['mathml'], 'mtable'), + (namespaces['mathml'], 'mtd'), + (namespaces['mathml'], 'mtext'), + (namespaces['mathml'], 'mtr'), + (namespaces['mathml'], 'munder'), + (namespaces['mathml'], 'munderover'), + (namespaces['mathml'], 'none'), + (namespaces['svg'], 'a'), + (namespaces['svg'], 'animate'), + (namespaces['svg'], 'animateColor'), + (namespaces['svg'], 'animateMotion'), + (namespaces['svg'], 'animateTransform'), + (namespaces['svg'], 'clipPath'), + (namespaces['svg'], 'circle'), + (namespaces['svg'], 'defs'), + (namespaces['svg'], 'desc'), + (namespaces['svg'], 'ellipse'), + (namespaces['svg'], 'font-face'), + (namespaces['svg'], 'font-face-name'), + (namespaces['svg'], 'font-face-src'), + (namespaces['svg'], 'g'), + (namespaces['svg'], 'glyph'), + (namespaces['svg'], 'hkern'), + (namespaces['svg'], 'linearGradient'), + (namespaces['svg'], 'line'), + (namespaces['svg'], 'marker'), + (namespaces['svg'], 'metadata'), + (namespaces['svg'], 'missing-glyph'), + (namespaces['svg'], 'mpath'), + (namespaces['svg'], 'path'), + (namespaces['svg'], 'polygon'), + (namespaces['svg'], 'polyline'), + (namespaces['svg'], 'radialGradient'), + (namespaces['svg'], 'rect'), + (namespaces['svg'], 'set'), + (namespaces['svg'], 'stop'), + (namespaces['svg'], 'svg'), + (namespaces['svg'], 'switch'), + (namespaces['svg'], 'text'), + (namespaces['svg'], 'title'), + (namespaces['svg'], 'tspan'), + (namespaces['svg'], 'use'), +)) + +allowed_attributes = frozenset(( + # HTML attributes + (None, 'abbr'), + (None, 'accept'), + (None, 'accept-charset'), + (None, 'accesskey'), + (None, 'action'), + (None, 'align'), + (None, 'alt'), + (None, 'autocomplete'), + (None, 'autofocus'), + (None, 'axis'), + (None, 'background'), + (None, 'balance'), + (None, 'bgcolor'), + (None, 'bgproperties'), + (None, 'border'), + (None, 'bordercolor'), + (None, 'bordercolordark'), + (None, 'bordercolorlight'), + (None, 'bottompadding'), + (None, 'cellpadding'), + (None, 'cellspacing'), + (None, 'ch'), + (None, 'challenge'), + (None, 'char'), + (None, 'charoff'), + (None, 'choff'), + (None, 'charset'), + (None, 'checked'), + (None, 'cite'), + (None, 'class'), + (None, 'clear'), + (None, 'color'), + (None, 'cols'), + (None, 'colspan'), + (None, 'compact'), + (None, 'contenteditable'), + (None, 'controls'), + (None, 'coords'), + (None, 'data'), + (None, 'datafld'), + (None, 'datapagesize'), + (None, 'datasrc'), + (None, 'datetime'), + (None, 'default'), + (None, 'delay'), + (None, 'dir'), + (None, 'disabled'), + (None, 'draggable'), + (None, 'dynsrc'), + (None, 'enctype'), + (None, 'end'), + (None, 'face'), + (None, 'for'), + (None, 'form'), + (None, 'frame'), + (None, 'galleryimg'), + (None, 'gutter'), + (None, 'headers'), + (None, 'height'), + (None, 'hidefocus'), + (None, 'hidden'), + (None, 
'high'), + (None, 'href'), + (None, 'hreflang'), + (None, 'hspace'), + (None, 'icon'), + (None, 'id'), + (None, 'inputmode'), + (None, 'ismap'), + (None, 'keytype'), + (None, 'label'), + (None, 'leftspacing'), + (None, 'lang'), + (None, 'list'), + (None, 'longdesc'), + (None, 'loop'), + (None, 'loopcount'), + (None, 'loopend'), + (None, 'loopstart'), + (None, 'low'), + (None, 'lowsrc'), + (None, 'max'), + (None, 'maxlength'), + (None, 'media'), + (None, 'method'), + (None, 'min'), + (None, 'multiple'), + (None, 'name'), + (None, 'nohref'), + (None, 'noshade'), + (None, 'nowrap'), + (None, 'open'), + (None, 'optimum'), + (None, 'pattern'), + (None, 'ping'), + (None, 'point-size'), + (None, 'poster'), + (None, 'pqg'), + (None, 'preload'), + (None, 'prompt'), + (None, 'radiogroup'), + (None, 'readonly'), + (None, 'rel'), + (None, 'repeat-max'), + (None, 'repeat-min'), + (None, 'replace'), + (None, 'required'), + (None, 'rev'), + (None, 'rightspacing'), + (None, 'rows'), + (None, 'rowspan'), + (None, 'rules'), + (None, 'scope'), + (None, 'selected'), + (None, 'shape'), + (None, 'size'), + (None, 'span'), + (None, 'src'), + (None, 'start'), + (None, 'step'), + (None, 'style'), + (None, 'summary'), + (None, 'suppress'), + (None, 'tabindex'), + (None, 'target'), + (None, 'template'), + (None, 'title'), + (None, 'toppadding'), + (None, 'type'), + (None, 'unselectable'), + (None, 'usemap'), + (None, 'urn'), + (None, 'valign'), + (None, 'value'), + (None, 'variable'), + (None, 'volume'), + (None, 'vspace'), + (None, 'vrml'), + (None, 'width'), + (None, 'wrap'), + (namespaces['xml'], 'lang'), + # MathML attributes + (None, 'actiontype'), + (None, 'align'), + (None, 'columnalign'), + (None, 'columnalign'), + (None, 'columnalign'), + (None, 'columnlines'), + (None, 'columnspacing'), + (None, 'columnspan'), + (None, 'depth'), + (None, 'display'), + (None, 'displaystyle'), + (None, 'equalcolumns'), + (None, 'equalrows'), + (None, 'fence'), + (None, 'fontstyle'), + (None, 'fontweight'), + (None, 'frame'), + (None, 'height'), + (None, 'linethickness'), + (None, 'lspace'), + (None, 'mathbackground'), + (None, 'mathcolor'), + (None, 'mathvariant'), + (None, 'mathvariant'), + (None, 'maxsize'), + (None, 'minsize'), + (None, 'other'), + (None, 'rowalign'), + (None, 'rowalign'), + (None, 'rowalign'), + (None, 'rowlines'), + (None, 'rowspacing'), + (None, 'rowspan'), + (None, 'rspace'), + (None, 'scriptlevel'), + (None, 'selection'), + (None, 'separator'), + (None, 'stretchy'), + (None, 'width'), + (None, 'width'), + (namespaces['xlink'], 'href'), + (namespaces['xlink'], 'show'), + (namespaces['xlink'], 'type'), + # SVG attributes + (None, 'accent-height'), + (None, 'accumulate'), + (None, 'additive'), + (None, 'alphabetic'), + (None, 'arabic-form'), + (None, 'ascent'), + (None, 'attributeName'), + (None, 'attributeType'), + (None, 'baseProfile'), + (None, 'bbox'), + (None, 'begin'), + (None, 'by'), + (None, 'calcMode'), + (None, 'cap-height'), + (None, 'class'), + (None, 'clip-path'), + (None, 'color'), + (None, 'color-rendering'), + (None, 'content'), + (None, 'cx'), + (None, 'cy'), + (None, 'd'), + (None, 'dx'), + (None, 'dy'), + (None, 'descent'), + (None, 'display'), + (None, 'dur'), + (None, 'end'), + (None, 'fill'), + (None, 'fill-opacity'), + (None, 'fill-rule'), + (None, 'font-family'), + (None, 'font-size'), + (None, 'font-stretch'), + (None, 'font-style'), + (None, 'font-variant'), + (None, 'font-weight'), + (None, 'from'), + (None, 'fx'), + (None, 'fy'), + (None, 'g1'), + (None, 'g2'), + (None, 
'glyph-name'), + (None, 'gradientUnits'), + (None, 'hanging'), + (None, 'height'), + (None, 'horiz-adv-x'), + (None, 'horiz-origin-x'), + (None, 'id'), + (None, 'ideographic'), + (None, 'k'), + (None, 'keyPoints'), + (None, 'keySplines'), + (None, 'keyTimes'), + (None, 'lang'), + (None, 'marker-end'), + (None, 'marker-mid'), + (None, 'marker-start'), + (None, 'markerHeight'), + (None, 'markerUnits'), + (None, 'markerWidth'), + (None, 'mathematical'), + (None, 'max'), + (None, 'min'), + (None, 'name'), + (None, 'offset'), + (None, 'opacity'), + (None, 'orient'), + (None, 'origin'), + (None, 'overline-position'), + (None, 'overline-thickness'), + (None, 'panose-1'), + (None, 'path'), + (None, 'pathLength'), + (None, 'points'), + (None, 'preserveAspectRatio'), + (None, 'r'), + (None, 'refX'), + (None, 'refY'), + (None, 'repeatCount'), + (None, 'repeatDur'), + (None, 'requiredExtensions'), + (None, 'requiredFeatures'), + (None, 'restart'), + (None, 'rotate'), + (None, 'rx'), + (None, 'ry'), + (None, 'slope'), + (None, 'stemh'), + (None, 'stemv'), + (None, 'stop-color'), + (None, 'stop-opacity'), + (None, 'strikethrough-position'), + (None, 'strikethrough-thickness'), + (None, 'stroke'), + (None, 'stroke-dasharray'), + (None, 'stroke-dashoffset'), + (None, 'stroke-linecap'), + (None, 'stroke-linejoin'), + (None, 'stroke-miterlimit'), + (None, 'stroke-opacity'), + (None, 'stroke-width'), + (None, 'systemLanguage'), + (None, 'target'), + (None, 'text-anchor'), + (None, 'to'), + (None, 'transform'), + (None, 'type'), + (None, 'u1'), + (None, 'u2'), + (None, 'underline-position'), + (None, 'underline-thickness'), + (None, 'unicode'), + (None, 'unicode-range'), + (None, 'units-per-em'), + (None, 'values'), + (None, 'version'), + (None, 'viewBox'), + (None, 'visibility'), + (None, 'width'), + (None, 'widths'), + (None, 'x'), + (None, 'x-height'), + (None, 'x1'), + (None, 'x2'), + (namespaces['xlink'], 'actuate'), + (namespaces['xlink'], 'arcrole'), + (namespaces['xlink'], 'href'), + (namespaces['xlink'], 'role'), + (namespaces['xlink'], 'show'), + (namespaces['xlink'], 'title'), + (namespaces['xlink'], 'type'), + (namespaces['xml'], 'base'), + (namespaces['xml'], 'lang'), + (namespaces['xml'], 'space'), + (None, 'y'), + (None, 'y1'), + (None, 'y2'), + (None, 'zoomAndPan'), +)) + +attr_val_is_uri = frozenset(( + (None, 'href'), + (None, 'src'), + (None, 'cite'), + (None, 'action'), + (None, 'longdesc'), + (None, 'poster'), + (None, 'background'), + (None, 'datasrc'), + (None, 'dynsrc'), + (None, 'lowsrc'), + (None, 'ping'), + (namespaces['xlink'], 'href'), + (namespaces['xml'], 'base'), +)) + +svg_attr_val_allows_ref = frozenset(( + (None, 'clip-path'), + (None, 'color-profile'), + (None, 'cursor'), + (None, 'fill'), + (None, 'filter'), + (None, 'marker'), + (None, 'marker-start'), + (None, 'marker-mid'), + (None, 'marker-end'), + (None, 'mask'), + (None, 'stroke'), +)) + +svg_allow_local_href = frozenset(( + (None, 'altGlyph'), + (None, 'animate'), + (None, 'animateColor'), + (None, 'animateMotion'), + (None, 'animateTransform'), + (None, 'cursor'), + (None, 'feImage'), + (None, 'filter'), + (None, 'linearGradient'), + (None, 'pattern'), + (None, 'radialGradient'), + (None, 'textpath'), + (None, 'tref'), + (None, 'set'), + (None, 'use') +)) + +allowed_css_properties = frozenset(( + 'azimuth', + 'background-color', + 'border-bottom-color', + 'border-collapse', + 'border-color', + 'border-left-color', + 'border-right-color', + 'border-top-color', + 'clear', + 'color', + 'cursor', + 'direction', + 
'display', + 'elevation', + 'float', + 'font', + 'font-family', + 'font-size', + 'font-style', + 'font-variant', + 'font-weight', + 'height', + 'letter-spacing', + 'line-height', + 'overflow', + 'pause', + 'pause-after', + 'pause-before', + 'pitch', + 'pitch-range', + 'richness', + 'speak', + 'speak-header', + 'speak-numeral', + 'speak-punctuation', + 'speech-rate', + 'stress', + 'text-align', + 'text-decoration', + 'text-indent', + 'unicode-bidi', + 'vertical-align', + 'voice-family', + 'volume', + 'white-space', + 'width', +)) + +allowed_css_keywords = frozenset(( + 'auto', + 'aqua', + 'black', + 'block', + 'blue', + 'bold', + 'both', + 'bottom', + 'brown', + 'center', + 'collapse', + 'dashed', + 'dotted', + 'fuchsia', + 'gray', + 'green', + '!important', + 'italic', + 'left', + 'lime', + 'maroon', + 'medium', + 'none', + 'navy', + 'normal', + 'nowrap', + 'olive', + 'pointer', + 'purple', + 'red', + 'right', + 'solid', + 'silver', + 'teal', + 'top', + 'transparent', + 'underline', + 'white', + 'yellow', +)) + +allowed_svg_properties = frozenset(( + 'fill', + 'fill-opacity', + 'fill-rule', + 'stroke', + 'stroke-width', + 'stroke-linecap', + 'stroke-linejoin', + 'stroke-opacity', +)) + +allowed_protocols = frozenset(( + 'ed2k', + 'ftp', + 'http', + 'https', + 'irc', + 'mailto', + 'news', + 'gopher', + 'nntp', + 'telnet', + 'webcal', + 'xmpp', + 'callto', + 'feed', + 'urn', + 'aim', + 'rsync', + 'tag', + 'ssh', + 'sftp', + 'rtsp', + 'afs', + 'data', +)) + +allowed_content_types = frozenset(( + 'image/png', + 'image/jpeg', + 'image/gif', + 'image/webp', + 'image/bmp', + 'text/plain', +)) + + +data_content_type = re.compile(r''' + ^ + # Match a content type / + (?P[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+) + # Match any character set and encoding + (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?) + |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?) + # Assume the rest is data + ,.* + $ + ''', + re.VERBOSE) + + +class Filter(base.Filter): + """Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes""" + def __init__(self, + source, + allowed_elements=allowed_elements, + allowed_attributes=allowed_attributes, + allowed_css_properties=allowed_css_properties, + allowed_css_keywords=allowed_css_keywords, + allowed_svg_properties=allowed_svg_properties, + allowed_protocols=allowed_protocols, + allowed_content_types=allowed_content_types, + attr_val_is_uri=attr_val_is_uri, + svg_attr_val_allows_ref=svg_attr_val_allows_ref, + svg_allow_local_href=svg_allow_local_href): + """Creates a Filter + + :arg allowed_elements: set of elements to allow--everything else will + be escaped + + :arg allowed_attributes: set of attributes to allow in + elements--everything else will be stripped + + :arg allowed_css_properties: set of CSS properties to allow--everything + else will be stripped + + :arg allowed_css_keywords: set of CSS keywords to allow--everything + else will be stripped + + :arg allowed_svg_properties: set of SVG properties to allow--everything + else will be removed + + :arg allowed_protocols: set of allowed protocols for URIs + + :arg allowed_content_types: set of allowed content types for ``data`` URIs. 
+ + :arg attr_val_is_uri: set of attributes that have URI values--values + that have a scheme not listed in ``allowed_protocols`` are removed + + :arg svg_attr_val_allows_ref: set of SVG attributes that can have + references + + :arg svg_allow_local_href: set of SVG elements that can have local + hrefs--these are removed + + """ + super(Filter, self).__init__(source) + + warnings.warn(_deprecation_msg, DeprecationWarning) + + self.allowed_elements = allowed_elements + self.allowed_attributes = allowed_attributes + self.allowed_css_properties = allowed_css_properties + self.allowed_css_keywords = allowed_css_keywords + self.allowed_svg_properties = allowed_svg_properties + self.allowed_protocols = allowed_protocols + self.allowed_content_types = allowed_content_types + self.attr_val_is_uri = attr_val_is_uri + self.svg_attr_val_allows_ref = svg_attr_val_allows_ref + self.svg_allow_local_href = svg_allow_local_href + + def __iter__(self): + for token in base.Filter.__iter__(self): + token = self.sanitize_token(token) + if token: + yield token + + # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and + # stripping out all attributes not in ALLOWED_ATTRIBUTES. Style attributes + # are parsed, and a restricted set, specified by ALLOWED_CSS_PROPERTIES and + # ALLOWED_CSS_KEYWORDS, are allowed through. attributes in ATTR_VAL_IS_URI + # are scanned, and only URI schemes specified in ALLOWED_PROTOCOLS are + # allowed. + # + # sanitize_html('') + # => <script> do_nasty_stuff() </script> + # sanitize_html('Click here for $100') + # => Click here for $100 + def sanitize_token(self, token): + + # accommodate filters which use token_type differently + token_type = token["type"] + if token_type in ("StartTag", "EndTag", "EmptyTag"): + name = token["name"] + namespace = token["namespace"] + if ((namespace, name) in self.allowed_elements or + (namespace is None and + (namespaces["html"], name) in self.allowed_elements)): + return self.allowed_token(token) + else: + return self.disallowed_token(token) + elif token_type == "Comment": + pass + else: + return token + + def allowed_token(self, token): + if "data" in token: + attrs = token["data"] + attr_names = set(attrs.keys()) + + # Remove forbidden attributes + for to_remove in (attr_names - self.allowed_attributes): + del token["data"][to_remove] + attr_names.remove(to_remove) + + # Remove attributes with disallowed URL values + for attr in (attr_names & self.attr_val_is_uri): + assert attr in attrs + # I don't have a clue where this regexp comes from or why it matches those + # characters, nor why we call unescape. I just know it's always been here. + # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all + # this will do is remove *more* than it otherwise would. 
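# Editorial sketch, not part of the vendored file: the net effect of the
# scheme check below is, for example,
#     <a href="javascript: do_nasty_stuff()">x</a>   ->  <a>x</a>
#     <a href="https://example.com/">x</a>           ->  unchanged
# because an attribute listed in attr_val_is_uri is dropped whenever its
# unescaped, control-character-stripped value parses to a scheme that is
# not in allowed_protocols.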
+ val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '', + unescape(attrs[attr])).lower() + # remove replacement characters from unescaped characters + val_unescaped = val_unescaped.replace("\ufffd", "") + try: + uri = urlparse.urlparse(val_unescaped) + except ValueError: + uri = None + del attrs[attr] + if uri and uri.scheme: + if uri.scheme not in self.allowed_protocols: + del attrs[attr] + if uri.scheme == 'data': + m = data_content_type.match(uri.path) + if not m: + del attrs[attr] + elif m.group('content_type') not in self.allowed_content_types: + del attrs[attr] + + for attr in self.svg_attr_val_allows_ref: + if attr in attrs: + attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', + ' ', + unescape(attrs[attr])) + if (token["name"] in self.svg_allow_local_href and + (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*', + attrs[(namespaces['xlink'], 'href')])): + del attrs[(namespaces['xlink'], 'href')] + if (None, 'style') in attrs: + attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) + token["data"] = attrs + return token + + def disallowed_token(self, token): + token_type = token["type"] + if token_type == "EndTag": + token["data"] = "" % token["name"] + elif token["data"]: + assert token_type in ("StartTag", "EmptyTag") + attrs = [] + for (ns, name), v in token["data"].items(): + attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) + token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) + else: + token["data"] = "<%s>" % token["name"] + if token.get("selfClosing"): + token["data"] = token["data"][:-1] + "/>" + + token["type"] = "Characters" + + del token["name"] + return token + + def sanitize_css(self, style): + # disallow urls + style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) + + # gauntlet + if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): + return '' + if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): + return '' + + clean = [] + for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): + if not value: + continue + if prop.lower() in self.allowed_css_properties: + clean.append(prop + ': ' + value + ';') + elif prop.split('-')[0].lower() in ['background', 'border', 'margin', + 'padding']: + for keyword in value.split(): + if keyword not in self.allowed_css_keywords and \ + not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa + break + else: + clean.append(prop + ': ' + value + ';') + elif prop.lower() in self.allowed_svg_properties: + clean.append(prop + ': ' + value + ';') + + return ' '.join(clean) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/whitespace.py new file mode 100644 index 0000000..0d12584 --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/filters/whitespace.py @@ -0,0 +1,38 @@ +from __future__ import absolute_import, division, unicode_literals + +import re + +from . 
import base +from ..constants import rcdataElements, spaceCharacters +spaceCharacters = "".join(spaceCharacters) + +SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) + + +class Filter(base.Filter): + """Collapses whitespace except in pre, textarea, and script elements""" + spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) + + def __iter__(self): + preserve = 0 + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag" \ + and (preserve or token["name"] in self.spacePreserveElements): + preserve += 1 + + elif type == "EndTag" and preserve: + preserve -= 1 + + elif not preserve and type == "SpaceCharacters" and token["data"]: + # Test on token["data"] above to not introduce spaces where there were not + token["data"] = " " + + elif not preserve and type == "Characters": + token["data"] = collapse_spaces(token["data"]) + + yield token + + +def collapse_spaces(text): + return SPACES_REGEX.sub(' ', text) diff --git a/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/html5parser.py b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/html5parser.py new file mode 100644 index 0000000..d06784f --- /dev/null +++ b/.venv/lib/python3.9/site-packages/pip/_vendor/html5lib/html5parser.py @@ -0,0 +1,2795 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import with_metaclass, viewkeys + +import types + +from . import _inputstream +from . import _tokenizer + +from . import treebuilders +from .treebuilders.base import Marker + +from . import _utils +from .constants import ( + spaceCharacters, asciiUpper2Lower, + specialElements, headingElements, cdataElements, rcdataElements, + tokenTypes, tagTokenTypes, + namespaces, + htmlIntegrationPointElements, mathmlTextIntegrationPointElements, + adjustForeignAttributes as adjustForeignAttributesMap, + adjustMathMLAttributes, adjustSVGAttributes, + E, + _ReparseException +) + + +def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse an HTML document as a string or file-like object into a tree + + :arg doc: the document to parse as a string or file-like object + + :arg treebuilder: the treebuilder to use when parsing + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :returns: parsed tree + + Example: + + >>> from html5lib.html5parser import parse + >>> parse('
<html><body><p>This is a doc</p></body></html>
') + + + """ + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parse(doc, **kwargs) + + +def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse an HTML fragment as a string or file-like object into a tree + + :arg doc: the fragment to parse as a string or file-like object + + :arg container: the container context to parse the fragment in + + :arg treebuilder: the treebuilder to use when parsing + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :returns: parsed tree + + Example: + + >>> from html5lib.html5libparser import parseFragment + >>> parseFragment('this is a fragment') + + + """ + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parseFragment(doc, container=container, **kwargs) + + +def method_decorator_metaclass(function): + class Decorated(type): + def __new__(meta, classname, bases, classDict): + for attributeName, attribute in classDict.items(): + if isinstance(attribute, types.FunctionType): + attribute = function(attribute) + + classDict[attributeName] = attribute + return type.__new__(meta, classname, bases, classDict) + return Decorated + + +class HTMLParser(object): + """HTML parser + + Generates a tree structure from a stream of (possibly malformed) HTML. + + """ + + def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): + """ + :arg tree: a treebuilder class controlling the type of tree that will be + returned. Built in treebuilders can be accessed through + html5lib.treebuilders.getTreeBuilder(treeType) + + :arg strict: raise an exception when a parse error is encountered + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :arg debug: whether or not to enable debug mode which logs things + + Example: + + >>> from html5lib.html5parser import HTMLParser + >>> parser = HTMLParser() # generates parser with etree builder + >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict + + """ + + # Raise an exception on the first error encountered + self.strict = strict + + if tree is None: + tree = treebuilders.getTreeBuilder("etree") + self.tree = tree(namespaceHTMLElements) + self.errors = [] + + self.phases = {name: cls(self, self.tree) for name, cls in + getPhases(debug).items()} + + def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): + + self.innerHTMLMode = innerHTML + self.container = container + self.scripting = scripting + self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) + self.reset() + + try: + self.mainLoop() + except _ReparseException: + self.reset() + self.mainLoop() + + def reset(self): + self.tree.reset() + self.firstStartTag = False + self.errors = [] + self.log = [] # only used with debug mode + # "quirks" / "limited quirks" / "no quirks" + self.compatMode = "no quirks" + + if self.innerHTMLMode: + self.innerHTML = self.container.lower() + + if self.innerHTML in cdataElements: + self.tokenizer.state = self.tokenizer.rcdataState + elif self.innerHTML in rcdataElements: + self.tokenizer.state = self.tokenizer.rawtextState + elif self.innerHTML == 'plaintext': + self.tokenizer.state = self.tokenizer.plaintextState + else: + # state already is data state + # self.tokenizer.state = self.tokenizer.dataState + pass + self.phase = self.phases["beforeHtml"] + self.phase.insertHtmlElement() 
+ self.resetInsertionMode() + else: + self.innerHTML = False # pylint:disable=redefined-variable-type + self.phase = self.phases["initial"] + + self.lastPhase = None + + self.beforeRCDataPhase = None + + self.framesetOK = True + + @property + def documentEncoding(self): + """Name of the character encoding that was used to decode the input stream, or + :obj:`None` if that is not determined yet + + """ + if not hasattr(self, 'tokenizer'): + return None + return self.tokenizer.stream.charEncoding[0].name + + def isHTMLIntegrationPoint(self, element): + if (element.name == "annotation-xml" and + element.namespace == namespaces["mathml"]): + return ("encoding" in element.attributes and + element.attributes["encoding"].translate( + asciiUpper2Lower) in + ("text/html", "application/xhtml+xml")) + else: + return (element.namespace, element.name) in htmlIntegrationPointElements + + def isMathMLTextIntegrationPoint(self, element): + return (element.namespace, element.name) in mathmlTextIntegrationPointElements + + def mainLoop(self): + CharactersToken = tokenTypes["Characters"] + SpaceCharactersToken = tokenTypes["SpaceCharacters"] + StartTagToken = tokenTypes["StartTag"] + EndTagToken = tokenTypes["EndTag"] + CommentToken = tokenTypes["Comment"] + DoctypeToken = tokenTypes["Doctype"] + ParseErrorToken = tokenTypes["ParseError"] + + for token in self.tokenizer: + prev_token = None + new_token = token + while new_token is not None: + prev_token = new_token + currentNode = self.tree.openElements[-1] if self.tree.openElements else None + currentNodeNamespace = currentNode.namespace if currentNode else None + currentNodeName = currentNode.name if currentNode else None + + type = new_token["type"] + + if type == ParseErrorToken: + self.parseError(new_token["data"], new_token.get("datavars", {})) + new_token = None + else: + if (len(self.tree.openElements) == 0 or + currentNodeNamespace == self.tree.defaultNamespace or + (self.isMathMLTextIntegrationPoint(currentNode) and + ((type == StartTagToken and + token["name"] not in frozenset(["mglyph", "malignmark"])) or + type in (CharactersToken, SpaceCharactersToken))) or + (currentNodeNamespace == namespaces["mathml"] and + currentNodeName == "annotation-xml" and + type == StartTagToken and + token["name"] == "svg") or + (self.isHTMLIntegrationPoint(currentNode) and + type in (StartTagToken, CharactersToken, SpaceCharactersToken))): + phase = self.phase + else: + phase = self.phases["inForeignContent"] + + if type == CharactersToken: + new_token = phase.processCharacters(new_token) + elif type == SpaceCharactersToken: + new_token = phase.processSpaceCharacters(new_token) + elif type == StartTagToken: + new_token = phase.processStartTag(new_token) + elif type == EndTagToken: + new_token = phase.processEndTag(new_token) + elif type == CommentToken: + new_token = phase.processComment(new_token) + elif type == DoctypeToken: + new_token = phase.processDoctype(new_token) + + if (type == StartTagToken and prev_token["selfClosing"] and + not prev_token["selfClosingAcknowledged"]): + self.parseError("non-void-element-with-trailing-solidus", + {"name": prev_token["name"]}) + + # When the loop finishes it's EOF + reprocess = True + phases = [] + while reprocess: + phases.append(self.phase) + reprocess = self.phase.processEOF() + if reprocess: + assert self.phase not in phases + + def parse(self, stream, *args, **kwargs): + """Parse a HTML document into a well-formed tree + + :arg stream: a file-like object or string containing the HTML to be parsed + + The optional 
encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element). + + :arg scripting: treat noscript elements as if JavaScript was turned on + + :returns: parsed tree + + Example: + + >>> from html5lib.html5parser import HTMLParser + >>> parser = HTMLParser() + >>> parser.parse('
<html><body><p>This is a doc</p></body></html>
') + + + """ + self._parse(stream, False, None, *args, **kwargs) + return self.tree.getDocument() + + def parseFragment(self, stream, *args, **kwargs): + """Parse a HTML fragment into a well-formed tree fragment + + :arg container: name of the element we're setting the innerHTML + property if set to None, default to 'div' + + :arg stream: a file-like object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + :arg scripting: treat noscript elements as if JavaScript was turned on + + :returns: parsed tree + + Example: + + >>> from html5lib.html5libparser import HTMLParser + >>> parser = HTMLParser() + >>> parser.parseFragment('this is a fragment') + + + """ + self._parse(stream, True, *args, **kwargs) + return self.tree.getFragment() + + def parseError(self, errorcode="XXX-undefined-error", datavars=None): + # XXX The idea is to make errorcode mandatory. + if datavars is None: + datavars = {} + self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) + if self.strict: + raise ParseError(E[errorcode] % datavars) + + def adjustMathMLAttributes(self, token): + adjust_attributes(token, adjustMathMLAttributes) + + def adjustSVGAttributes(self, token): + adjust_attributes(token, adjustSVGAttributes) + + def adjustForeignAttributes(self, token): + adjust_attributes(token, adjustForeignAttributesMap) + + def reparseTokenNormal(self, token): + # pylint:disable=unused-argument + self.parser.phase() + + def resetInsertionMode(self): + # The name of this method is mostly historical. (It's also used in the + # specification.) + last = False + newModes = { + "select": "inSelect", + "td": "inCell", + "th": "inCell", + "tr": "inRow", + "tbody": "inTableBody", + "thead": "inTableBody", + "tfoot": "inTableBody", + "caption": "inCaption", + "colgroup": "inColumnGroup", + "table": "inTable", + "head": "inBody", + "body": "inBody", + "frameset": "inFrameset", + "html": "beforeHead" + } + for node in self.tree.openElements[::-1]: + nodeName = node.name + new_phase = None + if node == self.tree.openElements[0]: + assert self.innerHTML + last = True + nodeName = self.innerHTML + # Check for conditions that should only happen in the innerHTML + # case + if nodeName in ("select", "colgroup", "head", "html"): + assert self.innerHTML + + if not last and node.namespace != self.tree.defaultNamespace: + continue + + if nodeName in newModes: + new_phase = self.phases[newModes[nodeName]] + break + elif last: + new_phase = self.phases["inBody"] + break + + self.phase = new_phase + + def parseRCDataRawtext(self, token, contentType): + # Generic RCDATA/RAWTEXT Parsing algorithm + assert contentType in ("RAWTEXT", "RCDATA") + + self.tree.insertElement(token) + + if contentType == "RAWTEXT": + self.tokenizer.state = self.tokenizer.rawtextState + else: + self.tokenizer.state = self.tokenizer.rcdataState + + self.originalPhase = self.phase + + self.phase = self.phases["text"] + + +@_utils.memoize +def getPhases(debug): + def log(function): + """Logger that records which phase processes each token""" + type_names = {value: key for key, value in tokenTypes.items()} + + def wrapped(self, *args, **kwargs): + if function.__name__.startswith("process") and len(args) > 0: + token = args[0] + info = {"type": type_names[token['type']]} + if token['type'] in tagTokenTypes: + info["name"] = token['name'] + + 
self.parser.log.append((self.parser.tokenizer.state.__name__, + self.parser.phase.__class__.__name__, + self.__class__.__name__, + function.__name__, + info)) + return function(self, *args, **kwargs) + else: + return function(self, *args, **kwargs) + return wrapped + + def getMetaclass(use_metaclass, metaclass_func): + if use_metaclass: + return method_decorator_metaclass(metaclass_func) + else: + return type + + # pylint:disable=unused-argument + class Phase(with_metaclass(getMetaclass(debug, log))): + """Base class for helper object that implements each phase of processing + """ + __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache") + + def __init__(self, parser, tree): + self.parser = parser + self.tree = tree + self.__startTagCache = {} + self.__endTagCache = {} + + def processEOF(self): + raise NotImplementedError + + def processComment(self, token): + # For most phases the following is correct. Where it's not it will be + # overridden. + self.tree.insertComment(token, self.tree.openElements[-1]) + + def processDoctype(self, token): + self.parser.parseError("unexpected-doctype") + + def processCharacters(self, token): + self.tree.insertText(token["data"]) + + def processSpaceCharacters(self, token): + self.tree.insertText(token["data"]) + + def processStartTag(self, token): + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__startTagCache: + func = self.__startTagCache[name] + else: + func = self.__startTagCache[name] = self.startTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__startTagCache) > len(self.startTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__startTagCache.pop(next(iter(self.__startTagCache))) + return func(token) + + def startTagHtml(self, token): + if not self.parser.firstStartTag and token["name"] == "html": + self.parser.parseError("non-html-root") + # XXX Need a check here to see if the first start tag token emitted is + # this token... If it's not, invoke self.parser.parseError(). 
+ for attr, value in token["data"].items(): + if attr not in self.tree.openElements[0].attributes: + self.tree.openElements[0].attributes[attr] = value + self.parser.firstStartTag = False + + def processEndTag(self, token): + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__endTagCache: + func = self.__endTagCache[name] + else: + func = self.__endTagCache[name] = self.endTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__endTagCache) > len(self.endTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__endTagCache.pop(next(iter(self.__endTagCache))) + return func(token) + + class InitialPhase(Phase): + __slots__ = tuple() + + def processSpaceCharacters(self, token): + pass + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + correct = token["correct"] + + if (name != "html" or publicId is not None or + systemId is not None and systemId != "about:legacy-compat"): + self.parser.parseError("unknown-doctype") + + if publicId is None: + publicId = "" + + self.tree.insertDoctype(token) + + if publicId != "": + publicId = publicId.translate(asciiUpper2Lower) + + if (not correct or token["name"] != "html" or + publicId.startswith( + ("+//silmaril//dtd html pro v0r11 19970101//", + "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", + "-//as//dtd html 3.0 aswedit + extensions//", + "-//ietf//dtd html 2.0 level 1//", + "-//ietf//dtd html 2.0 level 2//", + "-//ietf//dtd html 2.0 strict level 1//", + "-//ietf//dtd html 2.0 strict level 2//", + "-//ietf//dtd html 2.0 strict//", + "-//ietf//dtd html 2.0//", + "-//ietf//dtd html 2.1e//", + "-//ietf//dtd html 3.0//", + "-//ietf//dtd html 3.2 final//", + "-//ietf//dtd html 3.2//", + "-//ietf//dtd html 3//", + "-//ietf//dtd html level 0//", + "-//ietf//dtd html level 1//", + "-//ietf//dtd html level 2//", + "-//ietf//dtd html level 3//", + "-//ietf//dtd html strict level 0//", + "-//ietf//dtd html strict level 1//", + "-//ietf//dtd html strict level 2//", + "-//ietf//dtd html strict level 3//", + "-//ietf//dtd html strict//", + "-//ietf//dtd html//", + "-//metrius//dtd metrius presentational//", + "-//microsoft//dtd internet explorer 2.0 html strict//", + "-//microsoft//dtd internet explorer 2.0 html//", + "-//microsoft//dtd internet explorer 2.0 tables//", + "-//microsoft//dtd internet explorer 3.0 html strict//", + "-//microsoft//dtd internet explorer 3.0 html//", + "-//microsoft//dtd internet explorer 3.0 tables//", + "-//netscape comm. corp.//dtd html//", + "-//netscape comm. 
corp.//dtd strict html//", + "-//o'reilly and associates//dtd html 2.0//", + "-//o'reilly and associates//dtd html extended 1.0//", + "-//o'reilly and associates//dtd html extended relaxed 1.0//", + "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", + "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", + "-//spyglass//dtd html 2.0 extended//", + "-//sq//dtd html 2.0 hotmetal + extensions//", + "-//sun microsystems corp.//dtd hotjava html//", + "-//sun microsystems corp.//dtd hotjava strict html//", + "-//w3c//dtd html 3 1995-03-24//", + "-//w3c//dtd html 3.2 draft//", + "-//w3c//dtd html 3.2 final//", + "-//w3c//dtd html 3.2//", + "-//w3c//dtd html 3.2s draft//", + "-//w3c//dtd html 4.0 frameset//", + "-//w3c//dtd html 4.0 transitional//", + "-//w3c//dtd html experimental 19960712//", + "-//w3c//dtd html experimental 970421//", + "-//w3c//dtd w3 html//", + "-//w3o//dtd w3 html 3.0//", + "-//webtechs//dtd mozilla html 2.0//", + "-//webtechs//dtd mozilla html//")) or + publicId in ("-//w3o//dtd w3 html strict 3.0//en//", + "-/w3c/dtd html 4.0 transitional/en", + "html") or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is None or + systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): + self.parser.compatMode = "quirks" + elif (publicId.startswith( + ("-//w3c//dtd xhtml 1.0 frameset//", + "-//w3c//dtd xhtml 1.0 transitional//")) or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is not None): + self.parser.compatMode = "limited quirks" + + self.parser.phase = self.parser.phases["beforeHtml"] + + def anythingElse(self): + self.parser.compatMode = "quirks" + self.parser.phase = self.parser.phases["beforeHtml"] + + def processCharacters(self, token): + self.parser.parseError("expected-doctype-but-got-chars") + self.anythingElse() + return token + + def processStartTag(self, token): + self.parser.parseError("expected-doctype-but-got-start-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEndTag(self, token): + self.parser.parseError("expected-doctype-but-got-end-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEOF(self): + self.parser.parseError("expected-doctype-but-got-eof") + self.anythingElse() + return True + + class BeforeHtmlPhase(Phase): + __slots__ = tuple() + + # helper methods + def insertHtmlElement(self): + self.tree.insertRoot(impliedTagToken("html", "StartTag")) + self.parser.phase = self.parser.phases["beforeHead"] + + # other + def processEOF(self): + self.insertHtmlElement() + return True + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.insertHtmlElement() + return token + + def processStartTag(self, token): + if token["name"] == "html": + self.parser.firstStartTag = True + self.insertHtmlElement() + return token + + def processEndTag(self, token): + if token["name"] not in ("head", "body", "html", "br"): + self.parser.parseError("unexpected-end-tag-before-html", + {"name": token["name"]}) + else: + self.insertHtmlElement() + return token + + class BeforeHeadPhase(Phase): + __slots__ = tuple() + + def processEOF(self): + self.startTagHead(impliedTagToken("head", "StartTag")) + return True + + def processSpaceCharacters(self, token): + 
pass + + def processCharacters(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.tree.insertElement(token) + self.tree.headPointer = self.tree.openElements[-1] + self.parser.phase = self.parser.phases["inHead"] + + def startTagOther(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagImplyHead(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagOther(self, token): + self.parser.parseError("end-tag-after-implied-root", + {"name": token["name"]}) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("head", "body", "html", "br"), endTagImplyHead) + ]) + endTagHandler.default = endTagOther + + class InHeadPhase(Phase): + __slots__ = tuple() + + # the real thing + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.parser.parseError("two-heads-are-not-better-than-one") + + def startTagBaseLinkCommand(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagMeta(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + attributes = token["data"] + if self.parser.tokenizer.stream.charEncoding[1] == "tentative": + if "charset" in attributes: + self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) + elif ("content" in attributes and + "http-equiv" in attributes and + attributes["http-equiv"].lower() == "content-type"): + # Encoding it as UTF-8 here is a hack, as really we should pass + # the abstract Unicode string, and just use the + # ContentAttrParser on that, but using UTF-8 allows all chars + # to be encoded and as a ASCII-superset works. 
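# --- editor's aside: illustrative sketch, not part of the vendored diff -----
# The code that follows hands the meta "content" attribute to
# _inputstream.ContentAttrParser to pull a charset name out of it.  A rough,
# hypothetical approximation of that extraction using only the standard
# library (the real parser is more careful about quoting and whitespace):
import re

def sniff_charset(content):
    """Return the charset named in a content-type style string, or None."""
    match = re.search(r"charset\s*=\s*['\"]?([^'\";\s]+)", content, re.IGNORECASE)
    return match.group(1) if match else None

# e.g. sniff_charset("text/html; charset=ISO-8859-1") -> "ISO-8859-1"
# -----------------------------------------------------------------------------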
+ data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) + parser = _inputstream.ContentAttrParser(data) + codec = parser.parse() + self.parser.tokenizer.stream.changeEncoding(codec) + + def startTagTitle(self, token): + self.parser.parseRCDataRawtext(token, "RCDATA") + + def startTagNoFramesStyle(self, token): + # Need to decide whether to implement the scripting-disabled case + self.parser.parseRCDataRawtext(token, "RAWTEXT") + + def startTagNoscript(self, token): + if self.parser.scripting: + self.parser.parseRCDataRawtext(token, "RAWTEXT") + else: + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inHeadNoscript"] + + def startTagScript(self, token): + self.tree.insertElement(token) + self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState + self.parser.originalPhase = self.parser.phase + self.parser.phase = self.parser.phases["text"] + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHead(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "head", "Expected head got %s" % node.name + self.parser.phase = self.parser.phases["afterHead"] + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.endTagHead(impliedTagToken("head")) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("title", startTagTitle), + (("noframes", "style"), startTagNoFramesStyle), + ("noscript", startTagNoscript), + ("script", startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + startTagBaseLinkCommand), + ("meta", startTagMeta), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("head", endTagHead), + (("br", "html", "body"), endTagHtmlBodyBr) + ]) + endTagHandler.default = endTagOther + + class InHeadNoscriptPhase(Phase): + __slots__ = tuple() + + def processEOF(self): + self.parser.parseError("eof-in-head-noscript") + self.anythingElse() + return True + + def processComment(self, token): + return self.parser.phases["inHead"].processComment(token) + + def processCharacters(self, token): + self.parser.parseError("char-in-head-noscript") + self.anythingElse() + return token + + def processSpaceCharacters(self, token): + return self.parser.phases["inHead"].processSpaceCharacters(token) + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBaseLinkCommand(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagHeadNoscript(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagNoscript(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "noscript", "Expected noscript got %s" % node.name + self.parser.phase = self.parser.phases["inHead"] + + def endTagBr(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + # Caller must raise parse error first! 
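# --- editor's aside: illustrative sketch, not part of the vendored diff -----
# The startTagHandler / endTagHandler tables built in these phases map a tag
# name (or a tuple of names) to a phase method and fall back to a .default
# handler for anything else.  A minimal stand-in with the same shape, offered
# as a sketch rather than the real _utils.MethodDispatcher:
class SimpleDispatcher(dict):
    default = None

    def __init__(self, items):
        super(SimpleDispatcher, self).__init__()
        for names, handler in items:
            # A tuple key registers the same handler under every name in it.
            for name in (names if isinstance(names, tuple) else (names,)):
                self[name] = handler

    def __missing__(self, name):
        # Unknown tag names fall through to the default handler.
        return self.default

# table = SimpleDispatcher([("html", handle_html)])   # handle_html: hypothetical
# table.default = handle_other                        # handle_other: hypothetical
# -----------------------------------------------------------------------------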
+ self.endTagNoscript(impliedTagToken("noscript")) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + (("basefont", "bgsound", "link", "meta", "noframes", "style"), startTagBaseLinkCommand), + (("head", "noscript"), startTagHeadNoscript), + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("noscript", endTagNoscript), + ("br", endTagBr), + ]) + endTagHandler.default = endTagOther + + class AfterHeadPhase(Phase): + __slots__ = tuple() + + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBody(self, token): + self.parser.framesetOK = False + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inBody"] + + def startTagFrameset(self, token): + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inFrameset"] + + def startTagFromHead(self, token): + self.parser.parseError("unexpected-start-tag-out-of-my-head", + {"name": token["name"]}) + self.tree.openElements.append(self.tree.headPointer) + self.parser.phases["inHead"].processStartTag(token) + for node in self.tree.openElements[::-1]: + if node.name == "head": + self.tree.openElements.remove(node) + break + + def startTagHead(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.tree.insertElement(impliedTagToken("body", "StartTag")) + self.parser.phase = self.parser.phases["inBody"] + self.parser.framesetOK = True + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("body", startTagBody), + ("frameset", startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + startTagFromHead), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), + endTagHtmlBodyBr)]) + endTagHandler.default = endTagOther + + class InBodyPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody + # the really-really-really-very crazy mode + __slots__ = ("processSpaceCharacters",) + + def __init__(self, *args, **kwargs): + super(InBodyPhase, self).__init__(*args, **kwargs) + # Set this to the default handler + self.processSpaceCharacters = self.processSpaceCharactersNonPre + + def isMatchingFormattingElement(self, node1, node2): + return (node1.name == node2.name and + node1.namespace == node2.namespace and + node1.attributes == node2.attributes) + + # helper + def addFormattingElement(self, token): + self.tree.insertElement(token) + element = self.tree.openElements[-1] + + matchingElements = [] + for node in self.tree.activeFormattingElements[::-1]: + if node is Marker: + break + elif self.isMatchingFormattingElement(node, element): + matchingElements.append(node) + + assert len(matchingElements) <= 3 + if len(matchingElements) == 3: + self.tree.activeFormattingElements.remove(matchingElements[-1]) + self.tree.activeFormattingElements.append(element) + + # the real deal + def processEOF(self): + allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", 
"td", + "tfoot", "th", "thead", "tr", "body", + "html")) + for node in self.tree.openElements[::-1]: + if node.name not in allowed_elements: + self.parser.parseError("expected-closing-tag-but-got-eof") + break + # Stop parsing + + def processSpaceCharactersDropNewline(self, token): + # Sometimes (start of
<pre>, <listing>, and <textarea> blocks) we + # want to drop leading newlines
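# --- editor's aside: illustrative sketch, not part of the vendored diff -----
# Per the HTML parsing rules, a newline that immediately follows a <pre>,
# <listing> or <textarea> start tag is ignored.  A tiny standalone sketch of
# that rule (drop_leading_newline is a hypothetical helper; the real method
# also swaps the space-character handler back and reconstructs active
# formatting elements before inserting text):
def drop_leading_newline(data, current_element_name, element_is_empty):
    if (data.startswith("\n") and
            current_element_name in ("pre", "listing", "textarea") and
            element_is_empty):
        return data[1:]
    return data
# -----------------------------------------------------------------------------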